1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
30 #include "hard-reg-set.h"
33 #include "insn-config.h"
34 #include "insn-attr.h"
35 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
36 #include "expr.h"
42 #include "typeclass.h"
45 #include "langhooks.h"
49 /* Decide whether a function's arguments should be processed
50 from first to last or from last to first.
52 They should if the stack and args grow in opposite directions, but
53 only if we have push insns. */
57 #ifndef PUSH_ARGS_REVERSED
58 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
59 #define PUSH_ARGS_REVERSED /* If it's last to first. */
60 #endif
61 #endif
65 #ifndef STACK_PUSH_CODE
66 #ifdef STACK_GROWS_DOWNWARD
67 #define STACK_PUSH_CODE PRE_DEC
68 #else
69 #define STACK_PUSH_CODE PRE_INC
70 #endif
71 #endif
73 /* Assume that case vectors are not pc-relative. */
74 #ifndef CASE_VECTOR_PC_RELATIVE
75 #define CASE_VECTOR_PC_RELATIVE 0
76 #endif
78 /* Convert defined/undefined to boolean. */
79 #ifdef TARGET_MEM_FUNCTIONS
80 #undef TARGET_MEM_FUNCTIONS
81 #define TARGET_MEM_FUNCTIONS 1
82 #else
83 #define TARGET_MEM_FUNCTIONS 0
84 #endif
87 /* If this is nonzero, we do not bother generating VOLATILE
88 around volatile memory references, and we are willing to
89 output indirect addresses. If cse is to follow, we reject
90 indirect addresses so a useful potential cse is generated;
91 if it is used only once, instruction combination will produce
92 the same indirect address eventually. */
93 int cse_not_expected;
95 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
96 static tree placeholder_list = 0;
98 /* This structure is used by move_by_pieces to describe the move to
99 be performed. */
100 struct move_by_pieces
109 int explicit_inc_from;
110 unsigned HOST_WIDE_INT len;
111 HOST_WIDE_INT offset;
115 /* This structure is used by store_by_pieces to describe the clear to
116 be performed. */
118 struct store_by_pieces
124 unsigned HOST_WIDE_INT len;
125 HOST_WIDE_INT offset;
126 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
131 static rtx enqueue_insn PARAMS ((rtx, rtx));
132 static unsigned HOST_WIDE_INT move_by_pieces_ninsns
133 PARAMS ((unsigned HOST_WIDE_INT,
135 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
136 struct move_by_pieces *));
137 static bool block_move_libcall_safe_for_call_parm PARAMS ((void));
138 static bool emit_block_move_via_movstr PARAMS ((rtx, rtx, rtx, unsigned));
139 static rtx emit_block_move_via_libcall PARAMS ((rtx, rtx, rtx));
140 static tree emit_block_move_libcall_fn PARAMS ((int));
141 static void emit_block_move_via_loop PARAMS ((rtx, rtx, rtx, unsigned));
142 static rtx clear_by_pieces_1 PARAMS ((PTR, HOST_WIDE_INT,
144 static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
146 static void store_by_pieces_1 PARAMS ((struct store_by_pieces *,
148 static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...),
150 struct store_by_pieces *));
151 static bool clear_storage_via_clrstr PARAMS ((rtx, rtx, unsigned));
152 static rtx clear_storage_via_libcall PARAMS ((rtx, rtx));
153 static tree clear_storage_libcall_fn PARAMS ((int));
154 static rtx compress_float_constant PARAMS ((rtx, rtx));
155 static rtx get_subtarget PARAMS ((rtx));
156 static int is_zeros_p PARAMS ((tree));
157 static int mostly_zeros_p PARAMS ((tree));
158 static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
159 HOST_WIDE_INT, enum machine_mode,
160 tree, tree, int, int));
161 static void store_constructor PARAMS ((tree, rtx, int, HOST_WIDE_INT));
162 static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
163 HOST_WIDE_INT, enum machine_mode,
164 tree, enum machine_mode, int, tree,
166 static rtx var_rtx PARAMS ((tree));
167 static HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
168 static HOST_WIDE_INT highest_pow2_factor_for_type PARAMS ((tree, tree));
169 static int is_aligning_offset PARAMS ((tree, tree));
170 static rtx expand_increment PARAMS ((tree, int, int));
171 static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
172 static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
173 static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
175 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
177 static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
179 static void do_tablejump PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));
181 /* Record for each mode whether we can move a register directly to or
182 from an object of that mode in memory. If we can't, we won't try
183 to use that mode directly when accessing a field of that mode. */
185 static char direct_load[NUM_MACHINE_MODES];
186 static char direct_store[NUM_MACHINE_MODES];
188 /* Record for each mode whether we can float-extend from memory. */
190 static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
192 /* If a memory-to-memory move would take MOVE_RATIO or more simple
193 move-instruction sequences, we will do a movstr or libcall instead. */
195 #ifndef MOVE_RATIO
196 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
197 #define MOVE_RATIO 2
198 #else
199 /* If we are optimizing for space (-Os), cut down the default move ratio. */
200 #define MOVE_RATIO (optimize_size ? 3 : 15)
201 #endif
202 #endif
204 /* This macro is used to determine whether move_by_pieces should be called
205 to perform a structure copy. */
206 #ifndef MOVE_BY_PIECES_P
207 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
208 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
209 #endif
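/* A worked example of the heuristic above, as a sketch: the numbers
   assume a hypothetical 32-bit target with MOVE_MAX == 4 and no
   movstrM patterns, so MOVE_RATIO defaults to (optimize_size ? 3 : 15).
   For a word-aligned 16-byte copy, move_by_pieces_ninsns (16, 32) == 4
   and 4 < 15, so the copy is expanded inline; under -Os the ratio
   drops to 3 and the same copy would instead go through a movstr
   pattern or a libcall.  */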
211 /* If a clear memory operation would take CLEAR_RATIO or more simple
212 move-instruction sequences, we will do a clrstr or libcall instead. */
214 #ifndef CLEAR_RATIO
215 #if defined (HAVE_clrstrqi) || defined (HAVE_clrstrhi) || defined (HAVE_clrstrsi) || defined (HAVE_clrstrdi) || defined (HAVE_clrstrti)
216 #define CLEAR_RATIO 2
217 #else
218 /* If we are optimizing for space, cut down the default clear ratio. */
219 #define CLEAR_RATIO (optimize_size ? 3 : 15)
220 #endif
221 #endif
223 /* This macro is used to determine whether clear_by_pieces should be
224 called to clear storage. */
225 #ifndef CLEAR_BY_PIECES_P
226 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
227 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
228 #endif
230 /* This array records the insn_code of insns to perform block moves. */
231 enum insn_code movstr_optab[NUM_MACHINE_MODES];
233 /* This array records the insn_code of insns to perform block clears. */
234 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
236 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
238 #ifndef SLOW_UNALIGNED_ACCESS
239 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
240 #endif
242 /* This is run once per compilation to set up which modes can be used
243 directly in memory and to initialize the block move optab. */
249 enum machine_mode mode;
254 /* Try indexing by frame ptr and try by stack ptr.
255 It is known that on the Convex the stack ptr isn't a valid index.
256 With luck, one or the other is valid on any machine. */
257 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
258 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
260 /* A scratch register we can modify in-place below to avoid
261 useless RTL allocations. */
262 reg = gen_rtx_REG (VOIDmode, -1);
264 insn = rtx_alloc (INSN);
265 pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
266 PATTERN (insn) = pat;
268 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
269 mode = (enum machine_mode) ((int) mode + 1))
273 direct_load[(int) mode] = direct_store[(int) mode] = 0;
274 PUT_MODE (mem, mode);
275 PUT_MODE (mem1, mode);
276 PUT_MODE (reg, mode);
278 /* See if there is some register that can be used in this mode and
279 directly loaded or stored from memory. */
281 if (mode != VOIDmode && mode != BLKmode)
282 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
283 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
286 if (! HARD_REGNO_MODE_OK (regno, mode))
292 SET_DEST (pat) = reg;
293 if (recog (pat, insn, &num_clobbers) >= 0)
294 direct_load[(int) mode] = 1;
296 SET_SRC (pat) = mem1;
297 SET_DEST (pat) = reg;
298 if (recog (pat, insn, &num_clobbers) >= 0)
299 direct_load[(int) mode] = 1;
302 SET_DEST (pat) = mem;
303 if (recog (pat, insn, &num_clobbers) >= 0)
304 direct_store[(int) mode] = 1;
307 SET_DEST (pat) = mem1;
308 if (recog (pat, insn, &num_clobbers) >= 0)
309 direct_store[(int) mode] = 1;
313 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
315 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
316 mode = GET_MODE_WIDER_MODE (mode))
318 enum machine_mode srcmode;
319 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
320 srcmode = GET_MODE_WIDER_MODE (srcmode))
324 ic = can_extend_p (mode, srcmode, 0);
325 if (ic == CODE_FOR_nothing)
328 PUT_MODE (mem, srcmode);
330 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
331 float_extend_from_mem[mode][srcmode] = true;
336 /* This is run at the start of compiling a function. */
341 cfun->expr = (struct expr_status *) ggc_alloc (sizeof (struct expr_status));
344 pending_stack_adjust = 0;
345 stack_pointer_delta = 0;
346 inhibit_defer_pop = 0;
348 apply_args_value = 0;
352 /* Small sanity check that the queue is empty at the end of a function. */
355 finish_expr_for_function ()
361 /* Manage the queue of increment instructions to be output
362 for POSTINCREMENT_EXPR expressions, etc. */
364 /* Queue up to increment (or change) VAR later. BODY says how:
365 BODY should be the same thing you would pass to emit_insn
366 to increment right away. It will go to emit_insn later on.
368 The value is a QUEUED expression to be used in place of VAR
369 where you want to guarantee the pre-incrementation value of VAR. */
372 enqueue_insn (var, body)
375 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
376 body, pending_chain);
377 return pending_chain;
380 /* Use protect_from_queue to convert a QUEUED expression
381 into something that you can put immediately into an instruction.
382 If the queued incrementation has not happened yet,
383 protect_from_queue returns the variable itself.
384 If the incrementation has happened, protect_from_queue returns a temp
385 that contains a copy of the old value of the variable.
387 Any time an rtx which might possibly be a QUEUED is to be put
388 into an instruction, it must be passed through protect_from_queue first.
389 QUEUED expressions are not meaningful in instructions.
391 Do not pass a value through protect_from_queue and then hold
392 on to it for a while before putting it in an instruction!
393 If the queue is flushed in between, incorrect code will result. */
396 protect_from_queue (x, modify)
400 RTX_CODE code = GET_CODE (x);
402 #if 0 /* A QUEUED can hang around after the queue is forced out. */
403 /* Shortcut for most common case. */
404 if (pending_chain == 0)
410 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
411 use of autoincrement. Make a copy of the contents of the memory
412 location rather than a copy of the address, but not if the value is
413 of mode BLKmode. Don't modify X in place since it might be
414 shared. */
415 if (code == MEM && GET_MODE (x) != BLKmode
416 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
419 rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));
423 rtx temp = gen_reg_rtx (GET_MODE (x));
425 emit_insn_before (gen_move_insn (temp, new),
430 /* Copy the address into a pseudo, so that the returned value
431 remains correct across calls to emit_queue. */
432 return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
435 /* Otherwise, recursively protect the subexpressions of all
436 the kinds of rtx's that can contain a QUEUED. */
439 rtx tem = protect_from_queue (XEXP (x, 0), 0);
440 if (tem != XEXP (x, 0))
446 else if (code == PLUS || code == MULT)
448 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
449 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
450 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
459 /* If the increment has not happened, use the variable itself. Copy it
460 into a new pseudo so that the value remains correct across calls to
461 emit_queue. */
462 if (QUEUED_INSN (x) == 0)
463 return copy_to_reg (QUEUED_VAR (x));
464 /* If the increment has happened and a pre-increment copy exists,
465 use that copy. */
466 if (QUEUED_COPY (x) != 0)
467 return QUEUED_COPY (x);
468 /* The increment has happened but we haven't set up a pre-increment copy.
469 Set one up now, and use it. */
470 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
471 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
473 return QUEUED_COPY (x);
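/* A minimal usage sketch (not compiled in; OP0 and DST stand for rtx
   operands produced by surrounding expansion code): each operand that
   might contain a QUEUED is filtered immediately before being placed
   in an insn, and the result is not cached across an emit_queue.  */
#if 0
  op0 = protect_from_queue (op0, 0);	/* read-only use */
  dst = protect_from_queue (dst, 1);	/* destination; may be modified */
  emit_insn (gen_move_insn (dst, op0));
  emit_queue ();			/* flush pending increments */
#endif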
476 /* Return nonzero if X contains a QUEUED expression:
477 if it contains anything that will be altered by a queued increment.
478 We handle only combinations of MEM, PLUS, MINUS and MULT operators
479 since memory addresses generally contain only those. */
485 enum rtx_code code = GET_CODE (x);
491 return queued_subexp_p (XEXP (x, 0));
495 return (queued_subexp_p (XEXP (x, 0))
496 || queued_subexp_p (XEXP (x, 1)));
502 /* Perform all the pending incrementations. */
508 while ((p = pending_chain))
510 rtx body = QUEUED_BODY (p);
512 switch (GET_CODE (body))
520 QUEUED_INSN (p) = body;
524 #ifdef ENABLE_CHECKING
531 QUEUED_INSN (p) = emit_insn (body);
535 pending_chain = QUEUED_NEXT (p);
539 /* Copy data from FROM to TO, where the machine modes are not the same.
540 Both modes may be integer, or both may be floating.
541 UNSIGNEDP should be nonzero if FROM is an unsigned type.
542 This causes zero-extension instead of sign-extension. */
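/* Usage sketch (hypothetical operands): widen a signed SImode value
   NARROW into a fresh DImode pseudo.  */
#if 0
  rtx wide = gen_reg_rtx (DImode);
  convert_move (wide, narrow, 0);	/* 0: signed, so sign-extend */
#endif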
545 convert_move (to, from, unsignedp)
549 enum machine_mode to_mode = GET_MODE (to);
550 enum machine_mode from_mode = GET_MODE (from);
551 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
552 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
556 /* rtx code for making an equivalent value. */
557 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
558 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
560 to = protect_from_queue (to, 1);
561 from = protect_from_queue (from, 0);
563 if (to_real != from_real)
566 /* If FROM is a SUBREG that indicates that we have already done at least
567 the required extension, strip it. We don't handle such SUBREGs as
568 TO here. */
570 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
571 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
572 >= GET_MODE_SIZE (to_mode))
573 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
574 from = gen_lowpart (to_mode, from), from_mode = to_mode;
576 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
579 if (to_mode == from_mode
580 || (from_mode == VOIDmode && CONSTANT_P (from)))
582 emit_move_insn (to, from);
586 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
588 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
591 if (VECTOR_MODE_P (to_mode))
592 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
594 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
596 emit_move_insn (to, from);
600 if (to_real)
607 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
609 /* Try converting directly if the insn is supported. */
610 if ((code = can_extend_p (to_mode, from_mode, 0))
613 emit_unop_insn (code, to, from, UNKNOWN);
618 #ifdef HAVE_trunchfqf2
619 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
621 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
625 #ifdef HAVE_trunctqfqf2
626 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
628 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
632 #ifdef HAVE_truncsfqf2
633 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
635 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
639 #ifdef HAVE_truncdfqf2
640 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
642 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
646 #ifdef HAVE_truncxfqf2
647 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
649 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
653 #ifdef HAVE_trunctfqf2
654 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
656 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
661 #ifdef HAVE_trunctqfhf2
662 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
664 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
668 #ifdef HAVE_truncsfhf2
669 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
671 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
675 #ifdef HAVE_truncdfhf2
676 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
678 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
682 #ifdef HAVE_truncxfhf2
683 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
685 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
689 #ifdef HAVE_trunctfhf2
690 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
692 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
697 #ifdef HAVE_truncsftqf2
698 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
700 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
704 #ifdef HAVE_truncdftqf2
705 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
707 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
711 #ifdef HAVE_truncxftqf2
712 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
714 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
718 #ifdef HAVE_trunctftqf2
719 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
721 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
726 #ifdef HAVE_truncdfsf2
727 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
729 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
733 #ifdef HAVE_truncxfsf2
734 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
736 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
740 #ifdef HAVE_trunctfsf2
741 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
743 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
747 #ifdef HAVE_truncxfdf2
748 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
750 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
754 #ifdef HAVE_trunctfdf2
755 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
757 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
769 libcall = extendsfdf2_libfunc;
773 libcall = extendsfxf2_libfunc;
777 libcall = extendsftf2_libfunc;
789 libcall = truncdfsf2_libfunc;
793 libcall = extenddfxf2_libfunc;
797 libcall = extenddftf2_libfunc;
809 libcall = truncxfsf2_libfunc;
813 libcall = truncxfdf2_libfunc;
825 libcall = trunctfsf2_libfunc;
829 libcall = trunctfdf2_libfunc;
841 if (libcall == (rtx) 0)
842 /* This conversion is not implemented yet. */
843 abort ();
846 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
848 insns = get_insns ();
850 emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
855 /* Now both modes are integers. */
857 /* Handle expanding beyond a word. */
858 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
859 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
866 enum machine_mode lowpart_mode;
867 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
869 /* Try converting directly if the insn is supported. */
870 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
873 /* If FROM is a SUBREG, put it into a register. Do this
874 so that we always generate the same set of insns for
875 better cse'ing; if an intermediate assignment occurred,
876 we won't be doing the operation directly on the SUBREG. */
877 if (optimize > 0 && GET_CODE (from) == SUBREG)
878 from = force_reg (from_mode, from);
879 emit_unop_insn (code, to, from, equiv_code);
882 /* Next, try converting via full word. */
883 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
884 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
885 != CODE_FOR_nothing))
887 if (GET_CODE (to) == REG)
888 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
889 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
890 emit_unop_insn (code, to,
891 gen_lowpart (word_mode, to), equiv_code);
895 /* No special multiword conversion insn; do it by hand. */
898 /* Since we will turn this into a no conflict block, we must ensure
899 that the source does not overlap the target. */
901 if (reg_overlap_mentioned_p (to, from))
902 from = force_reg (from_mode, from);
904 /* Get a copy of FROM widened to a word, if necessary. */
905 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
906 lowpart_mode = word_mode;
908 lowpart_mode = from_mode;
910 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
912 lowpart = gen_lowpart (lowpart_mode, to);
913 emit_move_insn (lowpart, lowfrom);
915 /* Compute the value to put in each remaining word. */
917 fill_value = const0_rtx;
922 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
923 && STORE_FLAG_VALUE == -1)
925 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
927 fill_value = gen_reg_rtx (word_mode);
928 emit_insn (gen_slt (fill_value));
934 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
935 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
937 fill_value = convert_to_mode (word_mode, fill_value, 1);
941 /* Fill the remaining words. */
942 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
944 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
945 rtx subword = operand_subword (to, index, 1, to_mode);
950 if (fill_value != subword)
951 emit_move_insn (subword, fill_value);
954 insns = get_insns ();
957 emit_no_conflict_block (insns, to, from, NULL_RTX,
958 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
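/* Concretely (a sketch): extending a signed SImode value to DImode on
   a 32-bit machine copies the low word and then fills each remaining
   word with all zeros or all ones, as selected by the sign bit that
   FILL_VALUE captures above (always zeros when UNSIGNEDP).  */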
962 /* Truncating multi-word to a word or less. */
963 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
964 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
966 if (!((GET_CODE (from) == MEM
967 && ! MEM_VOLATILE_P (from)
968 && direct_load[(int) to_mode]
969 && ! mode_dependent_address_p (XEXP (from, 0)))
970 || GET_CODE (from) == REG
971 || GET_CODE (from) == SUBREG))
972 from = force_reg (from_mode, from);
973 convert_move (to, gen_lowpart (word_mode, from), 0);
977 /* Handle pointer conversion. */ /* SPEE 900220. */
978 if (to_mode == PQImode)
980 if (from_mode != QImode)
981 from = convert_to_mode (QImode, from, unsignedp);
983 #ifdef HAVE_truncqipqi2
984 if (HAVE_truncqipqi2)
986 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
989 #endif /* HAVE_truncqipqi2 */
993 if (from_mode == PQImode)
995 if (to_mode != QImode)
997 from = convert_to_mode (QImode, from, unsignedp);
1002 #ifdef HAVE_extendpqiqi2
1003 if (HAVE_extendpqiqi2)
1005 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
1008 #endif /* HAVE_extendpqiqi2 */
1013 if (to_mode == PSImode)
1015 if (from_mode != SImode)
1016 from = convert_to_mode (SImode, from, unsignedp);
1018 #ifdef HAVE_truncsipsi2
1019 if (HAVE_truncsipsi2)
1021 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
1024 #endif /* HAVE_truncsipsi2 */
1028 if (from_mode == PSImode)
1030 if (to_mode != SImode)
1032 from = convert_to_mode (SImode, from, unsignedp);
1037 #ifdef HAVE_extendpsisi2
1038 if (! unsignedp && HAVE_extendpsisi2)
1040 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
1043 #endif /* HAVE_extendpsisi2 */
1044 #ifdef HAVE_zero_extendpsisi2
1045 if (unsignedp && HAVE_zero_extendpsisi2)
1047 emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
1050 #endif /* HAVE_zero_extendpsisi2 */
1055 if (to_mode == PDImode)
1057 if (from_mode != DImode)
1058 from = convert_to_mode (DImode, from, unsignedp);
1060 #ifdef HAVE_truncdipdi2
1061 if (HAVE_truncdipdi2)
1063 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1066 #endif /* HAVE_truncdipdi2 */
1070 if (from_mode == PDImode)
1072 if (to_mode != DImode)
1074 from = convert_to_mode (DImode, from, unsignedp);
1079 #ifdef HAVE_extendpdidi2
1080 if (HAVE_extendpdidi2)
1082 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1085 #endif /* HAVE_extendpdidi2 */
1090 /* Now follow all the conversions between integers
1091 no more than a word long. */
1093 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1094 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1095 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1096 GET_MODE_BITSIZE (from_mode)))
1098 if (!((GET_CODE (from) == MEM
1099 && ! MEM_VOLATILE_P (from)
1100 && direct_load[(int) to_mode]
1101 && ! mode_dependent_address_p (XEXP (from, 0)))
1102 || GET_CODE (from) == REG
1103 || GET_CODE (from) == SUBREG))
1104 from = force_reg (from_mode, from);
1105 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1106 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1107 from = copy_to_reg (from);
1108 emit_move_insn (to, gen_lowpart (to_mode, from));
1112 /* Handle extension. */
1113 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1115 /* Convert directly if that works. */
1116 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1117 != CODE_FOR_nothing)
1120 from = force_not_mem (from);
1122 emit_unop_insn (code, to, from, equiv_code);
1127 enum machine_mode intermediate;
1131 /* Search for a mode to convert via. */
1132 for (intermediate = from_mode; intermediate != VOIDmode;
1133 intermediate = GET_MODE_WIDER_MODE (intermediate))
1134 if (((can_extend_p (to_mode, intermediate, unsignedp)
1135 != CODE_FOR_nothing)
1136 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1137 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1138 GET_MODE_BITSIZE (intermediate))))
1139 && (can_extend_p (intermediate, from_mode, unsignedp)
1140 != CODE_FOR_nothing))
1142 convert_move (to, convert_to_mode (intermediate, from,
1143 unsignedp), unsignedp);
1147 /* No suitable intermediate mode.
1148 Generate what we need with shifts. */
1149 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1150 - GET_MODE_BITSIZE (from_mode), 0);
1151 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1152 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1154 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1157 emit_move_insn (to, tmp);
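/* For instance (a sketch): with no usable extension pattern at all,
   sign-extending QImode into SImode on a 32-bit target becomes
   (x << 24) >> 24 in SImode; the arithmetic right shift replicates
   the byte's sign bit through the upper 24 bits.  */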
1162 /* Support special truncate insns for certain modes. */
1164 if (from_mode == DImode && to_mode == SImode)
1166 #ifdef HAVE_truncdisi2
1167 if (HAVE_truncdisi2)
1169 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1173 convert_move (to, force_reg (from_mode, from), unsignedp);
1177 if (from_mode == DImode && to_mode == HImode)
1179 #ifdef HAVE_truncdihi2
1180 if (HAVE_truncdihi2)
1182 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1186 convert_move (to, force_reg (from_mode, from), unsignedp);
1190 if (from_mode == DImode && to_mode == QImode)
1192 #ifdef HAVE_truncdiqi2
1193 if (HAVE_truncdiqi2)
1195 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1199 convert_move (to, force_reg (from_mode, from), unsignedp);
1203 if (from_mode == SImode && to_mode == HImode)
1205 #ifdef HAVE_truncsihi2
1206 if (HAVE_truncsihi2)
1208 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1212 convert_move (to, force_reg (from_mode, from), unsignedp);
1216 if (from_mode == SImode && to_mode == QImode)
1218 #ifdef HAVE_truncsiqi2
1219 if (HAVE_truncsiqi2)
1221 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1225 convert_move (to, force_reg (from_mode, from), unsignedp);
1229 if (from_mode == HImode && to_mode == QImode)
1231 #ifdef HAVE_trunchiqi2
1232 if (HAVE_trunchiqi2)
1234 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1238 convert_move (to, force_reg (from_mode, from), unsignedp);
1242 if (from_mode == TImode && to_mode == DImode)
1244 #ifdef HAVE_trunctidi2
1245 if (HAVE_trunctidi2)
1247 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1251 convert_move (to, force_reg (from_mode, from), unsignedp);
1255 if (from_mode == TImode && to_mode == SImode)
1257 #ifdef HAVE_trunctisi2
1258 if (HAVE_trunctisi2)
1260 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1264 convert_move (to, force_reg (from_mode, from), unsignedp);
1268 if (from_mode == TImode && to_mode == HImode)
1270 #ifdef HAVE_trunctihi2
1271 if (HAVE_trunctihi2)
1273 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1277 convert_move (to, force_reg (from_mode, from), unsignedp);
1281 if (from_mode == TImode && to_mode == QImode)
1283 #ifdef HAVE_trunctiqi2
1284 if (HAVE_trunctiqi2)
1286 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1290 convert_move (to, force_reg (from_mode, from), unsignedp);
1294 /* Handle truncation of volatile memrefs, and so on;
1295 the things that couldn't be truncated directly,
1296 and for which there was no special instruction. */
1297 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1299 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1300 emit_move_insn (to, temp);
1304 /* Mode combination is not recognized. */
1305 abort ();
1306 }
1308 /* Return an rtx for a value that would result
1309 from converting X to mode MODE.
1310 Both X and MODE may be floating, or both integer.
1311 UNSIGNEDP is nonzero if X is an unsigned value.
1312 This can be done by referring to a part of X in place
1313 or by copying to a new temporary with conversion.
1315 This function *must not* call protect_from_queue
1316 except when putting X into an insn (in which case convert_move does it). */
1319 convert_to_mode (mode, x, unsignedp)
1320 enum machine_mode mode;
1324 return convert_modes (mode, VOIDmode, x, unsignedp);
1327 /* Return an rtx for a value that would result
1328 from converting X from mode OLDMODE to mode MODE.
1329 Both modes may be floating, or both integer.
1330 UNSIGNEDP is nonzero if X is an unsigned value.
1332 This can be done by referring to a part of X in place
1333 or by copying to a new temporary with conversion.
1335 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1337 This function *must not* call protect_from_queue
1338 except when putting X into an insn (in which case convert_move does it). */
1341 convert_modes (mode, oldmode, x, unsignedp)
1342 enum machine_mode mode, oldmode;
1348 /* If FROM is a SUBREG that indicates that we have already done at least
1349 the required extension, strip it. */
1351 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1352 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1353 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1354 x = gen_lowpart (mode, x);
1356 if (GET_MODE (x) != VOIDmode)
1357 oldmode = GET_MODE (x);
1359 if (mode == oldmode)
1362 /* There is one case that we must handle specially: If we are converting
1363 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1364 we are to interpret the constant as unsigned, gen_lowpart will do
1365 the wrong if the constant appears negative. What we want to do is
1366 make the high-order word of the constant zero, not all ones. */
1368 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1369 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1370 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1372 HOST_WIDE_INT val = INTVAL (x);
1374 if (oldmode != VOIDmode
1375 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1377 int width = GET_MODE_BITSIZE (oldmode);
1379 /* We need to zero extend VAL. */
1380 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1383 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
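/* Worked example (a sketch, assuming 32-bit HOST_WIDE_INT): converting
   (const_int -1) to an unsigned 64-bit mode must yield the pair
   low == 0xffffffff, high == 0, whereas gen_lowpart would hand back
   the sign-extended all-ones value.  */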
1386 /* We can do this with a gen_lowpart if both desired and current modes
1387 are integer, and this is either a constant integer, a register, or a
1388 non-volatile MEM. Except for the constant case where MODE is no
1389 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1391 if ((GET_CODE (x) == CONST_INT
1392 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1393 || (GET_MODE_CLASS (mode) == MODE_INT
1394 && GET_MODE_CLASS (oldmode) == MODE_INT
1395 && (GET_CODE (x) == CONST_DOUBLE
1396 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1397 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1398 && direct_load[(int) mode])
1399 || (GET_CODE (x) == REG
1400 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1401 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1403 /* ?? If we don't know OLDMODE, we have to assume here that
1404 X does not need sign- or zero-extension. This may not be
1405 the case, but it's the best we can do. */
1406 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1407 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1409 HOST_WIDE_INT val = INTVAL (x);
1410 int width = GET_MODE_BITSIZE (oldmode);
1412 /* We must sign or zero-extend in this case. Start by
1413 zero-extending, then sign extend if we need to. */
1414 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1415 if (! unsignedp
1416 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1417 val |= (HOST_WIDE_INT) (-1) << width;
1419 return gen_int_mode (val, mode);
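/* E.g. (a sketch): widening the QImode constant 0x80 as signed masks
   VAL to 0x80, finds bit 7 set, and ORs in the upper bits, giving the
   properly sign-extended value 0xffffff80 on a 32-bit host.  */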
1422 return gen_lowpart (mode, x);
1425 temp = gen_reg_rtx (mode);
1426 convert_move (temp, x, unsignedp);
1430 /* This macro determines the largest unit size that
1431 move_by_pieces can use. */
1433 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1434 move efficiently, as opposed to MOVE_MAX which is the maximum
1435 number of bytes we can move with a single instruction. */
1437 #ifndef MOVE_MAX_PIECES
1438 #define MOVE_MAX_PIECES MOVE_MAX
1439 #endif
1441 /* STORE_MAX_PIECES is the number of bytes at a time that we can
1442 store efficiently. Due to internal GCC limitations, this is
1443 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
1444 for an immediate constant. */
1446 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
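/* E.g. (a sketch): an immediate constant occupies at most two
   HOST_WIDE_INTs (a CONST_DOUBLE), so with a 64-bit HOST_WIDE_INT
   store_by_pieces is capped at 16-byte pieces even if MOVE_MAX_PIECES
   would allow wider ones.  */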
1448 /* Generate several move instructions to copy LEN bytes from block FROM to
1449 block TO. (These are MEM rtx's with BLKmode). The caller must pass FROM
1450 and TO through protect_from_queue before calling.
1452 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1453 used to push FROM to the stack.
1455 ALIGN is maximum alignment we can assume. */
1458 move_by_pieces (to, from, len, align)
1460 unsigned HOST_WIDE_INT len;
1463 struct move_by_pieces data;
1464 rtx to_addr, from_addr = XEXP (from, 0);
1465 unsigned int max_size = MOVE_MAX_PIECES + 1;
1466 enum machine_mode mode = VOIDmode, tmode;
1467 enum insn_code icode;
1470 data.from_addr = from_addr;
1473 to_addr = XEXP (to, 0);
1476 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1477 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1479 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1486 #ifdef STACK_GROWS_DOWNWARD
1492 data.to_addr = to_addr;
1495 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1496 || GET_CODE (from_addr) == POST_INC
1497 || GET_CODE (from_addr) == POST_DEC);
1499 data.explicit_inc_from = 0;
1500 data.explicit_inc_to = 0;
1501 if (data.reverse) data.offset = len;
1504 /* If copying requires more than two move insns,
1505 copy addresses to registers (to make displacements shorter)
1506 and use post-increment if available. */
1507 if (!(data.autinc_from && data.autinc_to)
1508 && move_by_pieces_ninsns (len, align) > 2)
1510 /* Find the mode of the largest move... */
1511 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1512 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1513 if (GET_MODE_SIZE (tmode) < max_size)
1516 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1518 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1519 data.autinc_from = 1;
1520 data.explicit_inc_from = -1;
1522 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1524 data.from_addr = copy_addr_to_reg (from_addr);
1525 data.autinc_from = 1;
1526 data.explicit_inc_from = 1;
1528 if (!data.autinc_from && CONSTANT_P (from_addr))
1529 data.from_addr = copy_addr_to_reg (from_addr);
1530 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1532 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1534 data.explicit_inc_to = -1;
1536 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1538 data.to_addr = copy_addr_to_reg (to_addr);
1540 data.explicit_inc_to = 1;
1542 if (!data.autinc_to && CONSTANT_P (to_addr))
1543 data.to_addr = copy_addr_to_reg (to_addr);
1546 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1547 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1548 align = MOVE_MAX * BITS_PER_UNIT;
1550 /* First move what we can in the largest integer mode, then go to
1551 successively smaller modes. */
1553 while (max_size > 1)
1555 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1556 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1557 if (GET_MODE_SIZE (tmode) < max_size)
1560 if (mode == VOIDmode)
1563 icode = mov_optab->handlers[(int) mode].insn_code;
1564 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1565 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1567 max_size = GET_MODE_SIZE (mode);
1570 /* The code above should have handled everything. */
1571 if (data.len > 0)
1572 abort ();
1573 }
1575 /* Return number of insns required to move L bytes by pieces.
1576 ALIGN (in bits) is maximum alignment we can assume. */
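/* Worked example (a sketch, assuming MOVE_MAX == 4 and QI/HI/SI move
   patterns): l == 10 at 32-bit alignment decomposes into two SImode
   moves (8 bytes) plus one HImode move (2 bytes), so the function
   returns 3.  */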
1578 static unsigned HOST_WIDE_INT
1579 move_by_pieces_ninsns (l, align)
1580 unsigned HOST_WIDE_INT l;
1583 unsigned HOST_WIDE_INT n_insns = 0;
1584 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1586 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1587 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1588 align = MOVE_MAX * BITS_PER_UNIT;
1590 while (max_size > 1)
1592 enum machine_mode mode = VOIDmode, tmode;
1593 enum insn_code icode;
1595 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1596 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1597 if (GET_MODE_SIZE (tmode) < max_size)
1600 if (mode == VOIDmode)
1603 icode = mov_optab->handlers[(int) mode].insn_code;
1604 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1605 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1607 max_size = GET_MODE_SIZE (mode);
1615 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1616 with move instructions for mode MODE. GENFUN is the gen_... function
1617 to make a move insn for that mode. DATA has all the other info. */
1620 move_by_pieces_1 (genfun, mode, data)
1621 rtx (*genfun) PARAMS ((rtx, ...));
1622 enum machine_mode mode;
1623 struct move_by_pieces *data;
1625 unsigned int size = GET_MODE_SIZE (mode);
1626 rtx to1 = NULL_RTX, from1;
1628 while (data->len >= size)
1631 data->offset -= size;
1635 if (data->autinc_to)
1636 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1639 to1 = adjust_address (data->to, mode, data->offset);
1642 if (data->autinc_from)
1643 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1646 from1 = adjust_address (data->from, mode, data->offset);
1648 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1649 emit_insn (gen_add2_insn (data->to_addr,
1650 GEN_INT (-(HOST_WIDE_INT)size)));
1651 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1652 emit_insn (gen_add2_insn (data->from_addr,
1653 GEN_INT (-(HOST_WIDE_INT)size)));
1656 emit_insn ((*genfun) (to1, from1));
1659 #ifdef PUSH_ROUNDING
1660 emit_single_push_insn (mode, from1, NULL);
1666 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1667 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1668 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1669 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1671 if (! data->reverse)
1672 data->offset += size;
1678 /* Emit code to move a block Y to a block X. This may be done with
1679 string-move instructions, with multiple scalar move instructions,
1680 or with a library call.
1682 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1683 SIZE is an rtx that says how long they are.
1684 ALIGN is the maximum alignment we can assume they have.
1685 METHOD describes what kind of copy this is, and what mechanisms may be used.
1687 Return the address of the new block, if memcpy is called and returns it,
1688 0 otherwise. */
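/* Usage sketch (X and Y are BLKmode MEMs and SIZE an rtx built by the
   caller): copy SIZE bytes from Y to X as an ordinary block copy.  */
#if 0
  retval = emit_block_move (x, y, size, BLOCK_OP_NORMAL);
#endif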
1691 emit_block_move (x, y, size, method)
1693 enum block_op_methods method;
1701 case BLOCK_OP_NORMAL:
1702 may_use_call = true;
1705 case BLOCK_OP_CALL_PARM:
1706 may_use_call = block_move_libcall_safe_for_call_parm ();
1708 /* Make inhibit_defer_pop nonzero around the library call
1709 to force it to pop the arguments right away. */
1713 case BLOCK_OP_NO_LIBCALL:
1714 may_use_call = false;
1721 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1723 if (GET_MODE (x) != BLKmode)
1724 abort ();
1725 if (GET_MODE (y) != BLKmode)
1726 abort ();
1728 x = protect_from_queue (x, 1);
1729 y = protect_from_queue (y, 0);
1730 size = protect_from_queue (size, 0);
1732 if (GET_CODE (x) != MEM)
1733 abort ();
1734 if (GET_CODE (y) != MEM)
1735 abort ();
1739 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1740 can be incorrect is coming from __builtin_memcpy. */
1741 if (GET_CODE (size) == CONST_INT)
1743 x = shallow_copy_rtx (x);
1744 y = shallow_copy_rtx (y);
1745 set_mem_size (x, size);
1746 set_mem_size (y, size);
1749 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1750 move_by_pieces (x, y, INTVAL (size), align);
1751 else if (emit_block_move_via_movstr (x, y, size, align))
1753 else if (may_use_call)
1754 retval = emit_block_move_via_libcall (x, y, size);
1756 emit_block_move_via_loop (x, y, size, align);
1758 if (method == BLOCK_OP_CALL_PARM)
1764 /* A subroutine of emit_block_move. Returns true if calling the
1765 block move libcall will not clobber any parameters which may have
1766 already been placed on the stack. */
1769 block_move_libcall_safe_for_call_parm ()
1775 /* Check to see whether memcpy takes all register arguments. */
1776 static enum {
1777 takes_regs_uninit, takes_regs_no, takes_regs_yes
1778 } takes_regs = takes_regs_uninit;
1782 case takes_regs_uninit:
1784 CUMULATIVE_ARGS args_so_far;
1787 fn = emit_block_move_libcall_fn (false);
1788 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0);
1790 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1791 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1793 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1794 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1795 if (!tmp || !REG_P (tmp))
1796 goto fail_takes_regs;
1797 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1798 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
1800 goto fail_takes_regs;
1802 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1805 takes_regs = takes_regs_yes;
1808 case takes_regs_yes:
1812 takes_regs = takes_regs_no;
1823 /* A subroutine of emit_block_move. Expand a movstr pattern;
1824 return true if successful. */
1827 emit_block_move_via_movstr (x, y, size, align)
1831 /* Try the most limited insn first, because there's no point
1832 including more than one in the machine description unless
1833 the more limited one has some advantage. */
1835 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1836 enum machine_mode mode;
1838 /* Since this is a move insn, we don't care about volatility. */
1841 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1842 mode = GET_MODE_WIDER_MODE (mode))
1844 enum insn_code code = movstr_optab[(int) mode];
1845 insn_operand_predicate_fn pred;
1847 if (code != CODE_FOR_nothing
1848 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1849 here because if SIZE is less than the mode mask, as it is
1850 returned by the macro, it will definitely be less than the
1851 actual mode mask. */
1852 && ((GET_CODE (size) == CONST_INT
1853 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1854 <= (GET_MODE_MASK (mode) >> 1)))
1855 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1856 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1857 || (*pred) (x, BLKmode))
1858 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1859 || (*pred) (y, BLKmode))
1860 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1861 || (*pred) (opalign, VOIDmode)))
1864 rtx last = get_last_insn ();
1867 op2 = convert_to_mode (mode, size, 1);
1868 pred = insn_data[(int) code].operand[2].predicate;
1869 if (pred != 0 && ! (*pred) (op2, mode))
1870 op2 = copy_to_mode_reg (mode, op2);
1872 /* ??? When called via emit_block_move_for_call, it'd be
1873 nice if there were some way to inform the backend, so
1874 that it doesn't fail the expansion because it thinks
1875 emitting the libcall would be more efficient. */
1877 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1885 delete_insns_since (last);
1893 /* A subroutine of emit_block_move. Expand a call to memcpy or bcopy.
1894 Return the return value from memcpy, 0 otherwise. */
1897 emit_block_move_via_libcall (dst, src, size)
1900 tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
1901 enum machine_mode size_mode;
1904 /* DST, SRC, or SIZE may have been passed through protect_from_queue.
1906 It is unsafe to save the value generated by protect_from_queue
1907 and reuse it later. Consider what happens if emit_queue is
1908 called before the return value from protect_from_queue is used.
1910 Expansion of the CALL_EXPR below will call emit_queue before
1911 we are finished emitting RTL for argument setup. So if we are
1912 not careful we could get the wrong value for an argument.
1914 To avoid this problem we go ahead and emit code to copy X, Y &
1915 SIZE into new pseudos. We can then place those new pseudos
916 into an RTL_EXPR and use them later, even after a call to
917 emit_queue.
1919 Note this is not strictly needed for library calls since they
1920 do not call emit_queue before loading their arguments. However,
1921 we may need to have library calls call emit_queue in the future
1922 since failing to do so could cause problems for targets which
1923 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1925 dst = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1926 src = copy_to_mode_reg (Pmode, XEXP (src, 0));
1928 if (TARGET_MEM_FUNCTIONS)
1929 size_mode = TYPE_MODE (sizetype);
1931 size_mode = TYPE_MODE (unsigned_type_node);
1932 size = convert_to_mode (size_mode, size, 1);
1933 size = copy_to_mode_reg (size_mode, size);
1935 /* It is incorrect to use the libcall calling conventions to call
1936 memcpy in this context. This could be a user call to memcpy and
1937 the user may wish to examine the return value from memcpy. For
1938 targets where libcalls and normal calls have different conventions
1939 for returning pointers, we could end up generating incorrect code.
1941 For convenience, we generate the call to bcopy this way as well. */
1943 dst_tree = make_tree (ptr_type_node, dst);
1944 src_tree = make_tree (ptr_type_node, src);
1945 if (TARGET_MEM_FUNCTIONS)
1946 size_tree = make_tree (sizetype, size);
1948 size_tree = make_tree (unsigned_type_node, size);
1950 fn = emit_block_move_libcall_fn (true);
1951 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
1952 if (TARGET_MEM_FUNCTIONS)
1954 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1955 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1959 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1960 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1963 /* Now we have to build up the CALL_EXPR itself. */
1964 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1965 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1966 call_expr, arg_list, NULL_TREE);
1967 TREE_SIDE_EFFECTS (call_expr) = 1;
1969 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1971 /* If we are initializing a readonly value, show the above call
1972 clobbered it. Otherwise, a load from it may erroneously be
1973 hoisted from a loop. */
1974 if (RTX_UNCHANGING_P (dst))
1975 emit_insn (gen_rtx_CLOBBER (VOIDmode, dst));
1977 return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
1980 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1981 for the function we use for block copies. The first time FOR_CALL
1982 is true, we call assemble_external. */
1984 static GTY(()) tree block_move_fn;
1987 emit_block_move_libcall_fn (for_call)
1990 static bool emitted_extern;
1991 tree fn = block_move_fn, args;
1995 if (TARGET_MEM_FUNCTIONS)
1997 fn = get_identifier ("memcpy");
1998 args = build_function_type_list (ptr_type_node, ptr_type_node,
1999 const_ptr_type_node, sizetype,
2004 fn = get_identifier ("bcopy");
2005 args = build_function_type_list (void_type_node, const_ptr_type_node,
2006 ptr_type_node, unsigned_type_node,
2010 fn = build_decl (FUNCTION_DECL, fn, args);
2011 DECL_EXTERNAL (fn) = 1;
2012 TREE_PUBLIC (fn) = 1;
2013 DECL_ARTIFICIAL (fn) = 1;
2014 TREE_NOTHROW (fn) = 1;
2019 if (for_call && !emitted_extern)
2021 emitted_extern = true;
2022 make_decl_rtl (fn, NULL);
2023 assemble_external (fn);
2029 /* A subroutine of emit_block_move. Copy the data via an explicit
2030 loop. This is used only when libcalls are forbidden. */
2031 /* ??? It'd be nice to copy in hunks larger than QImode. */
2034 emit_block_move_via_loop (x, y, size, align)
2036 unsigned int align ATTRIBUTE_UNUSED;
2038 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
2039 enum machine_mode iter_mode;
2041 iter_mode = GET_MODE (size);
2042 if (iter_mode == VOIDmode)
2043 iter_mode = word_mode;
2045 top_label = gen_label_rtx ();
2046 cmp_label = gen_label_rtx ();
2047 iter = gen_reg_rtx (iter_mode);
2049 emit_move_insn (iter, const0_rtx);
2051 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
2052 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
2053 do_pending_stack_adjust ();
2055 emit_note (NULL, NOTE_INSN_LOOP_BEG);
2057 emit_jump (cmp_label);
2058 emit_label (top_label);
2060 tmp = convert_modes (Pmode, iter_mode, iter, true);
2061 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
2062 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
2063 x = change_address (x, QImode, x_addr);
2064 y = change_address (y, QImode, y_addr);
2066 emit_move_insn (x, y);
2068 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
2069 true, OPTAB_LIB_WIDEN);
2071 emit_move_insn (iter, tmp);
2073 emit_note (NULL, NOTE_INSN_LOOP_CONT);
2074 emit_label (cmp_label);
2076 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
2079 emit_note (NULL, NOTE_INSN_LOOP_END);
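/* The RTL emitted above corresponds roughly to this C loop (a sketch,
   with ITER the new pseudo and X, Y the two blocks):

	for (iter = 0; iter < size; iter++)
	  ((char *) x)[iter] = ((char *) y)[iter];  */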
2082 /* Copy all or part of a value X into registers starting at REGNO.
2083 The number of registers to be filled is NREGS. */
2086 move_block_to_reg (regno, x, nregs, mode)
2090 enum machine_mode mode;
2093 #ifdef HAVE_load_multiple
2101 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2102 x = validize_mem (force_const_mem (mode, x));
2104 /* See if the machine can do this with a load multiple insn. */
2105 #ifdef HAVE_load_multiple
2106 if (HAVE_load_multiple)
2108 last = get_last_insn ();
2109 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
2117 delete_insns_since (last);
2121 for (i = 0; i < nregs; i++)
2122 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
2123 operand_subword_force (x, i, mode));
2126 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
2127 The number of registers to be filled is NREGS. SIZE indicates the number
2128 of bytes in the object X. */
2131 move_block_from_reg (regno, x, nregs, size)
2138 #ifdef HAVE_store_multiple
2142 enum machine_mode mode;
2147 /* If SIZE is that of a mode no bigger than a word, just use that
2148 mode's store operation. */
2149 if (size <= UNITS_PER_WORD
2150 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode
2151 && !FUNCTION_ARG_REG_LITTLE_ENDIAN)
2153 emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno));
2157 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
2158 to the left before storing to memory. Note that the previous test
2159 doesn't handle all cases (e.g. SIZE == 3). */
2160 if (size < UNITS_PER_WORD
2162 && !FUNCTION_ARG_REG_LITTLE_ENDIAN)
2164 rtx tem = operand_subword (x, 0, 1, BLKmode);
2170 shift = expand_shift (LSHIFT_EXPR, word_mode,
2171 gen_rtx_REG (word_mode, regno),
2172 build_int_2 ((UNITS_PER_WORD - size)
2173 * BITS_PER_UNIT, 0), NULL_RTX, 0);
2174 emit_move_insn (tem, shift);
2178 /* See if the machine can do this with a store multiple insn. */
2179 #ifdef HAVE_store_multiple
2180 if (HAVE_store_multiple)
2182 last = get_last_insn ();
2183 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
2191 delete_insns_since (last);
2195 for (i = 0; i < nregs; i++)
2197 rtx tem = operand_subword (x, i, 1, BLKmode);
2202 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
2206 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
2207 ORIG, where ORIG is a non-consecutive group of registers represented by
2208 a PARALLEL. The clone is identical to the original except in that the
2209 original set of registers is replaced by a new set of pseudo registers.
2210 The new set has the same modes as the original set. */
2213 gen_group_rtx (orig)
2219 if (GET_CODE (orig) != PARALLEL)
2222 length = XVECLEN (orig, 0);
2223 tmps = (rtx *) alloca (sizeof (rtx) * length);
2225 /* Skip a NULL entry in first slot. */
2226 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
2231 for (; i < length; i++)
2233 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
2234 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
2236 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
2239 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
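/* Illustration (a sketch): a group such as
     (parallel [(expr_list (reg:DI 3) (const_int 0))
		(expr_list (reg:DI 4) (const_int 8))])
   describes a 16-byte value split across two DImode registers at byte
   offsets 0 and 8; gen_group_rtx keeps the modes and offsets but
   replaces each hard register with a fresh pseudo.  */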
2242 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
2243 registers represented by a PARALLEL. SSIZE represents the total size of
2244 block SRC in bytes, or -1 if not known. */
2245 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
2246 the balance will be in what would be the low-order memory addresses, i.e.
2247 left justified for big endian, right justified for little endian. This
2248 happens to be true for the targets currently using this support. If this
2249 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
2253 emit_group_load (dst, orig_src, ssize)
2260 if (GET_CODE (dst) != PARALLEL)
2263 /* Check for a NULL entry, used to indicate that the parameter goes
2264 both on the stack and in registers. */
2265 if (XEXP (XVECEXP (dst, 0, 0), 0))
2270 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
2272 /* Process the pieces. */
2273 for (i = start; i < XVECLEN (dst, 0); i++)
2275 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
2276 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
2277 unsigned int bytelen = GET_MODE_SIZE (mode);
2280 /* Handle trailing fragments that run over the size of the struct. */
2281 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2283 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2284 bytelen = ssize - bytepos;
2289 /* If we won't be loading directly from memory, protect the real source
2290 from strange tricks we might play; but make sure that the source can
2291 be loaded directly into the destination. */
2293 if (GET_CODE (orig_src) != MEM
2294 && (!CONSTANT_P (orig_src)
2295 || (GET_MODE (orig_src) != mode
2296 && GET_MODE (orig_src) != VOIDmode)))
2298 if (GET_MODE (orig_src) == VOIDmode)
2299 src = gen_reg_rtx (mode);
2301 src = gen_reg_rtx (GET_MODE (orig_src));
2303 emit_move_insn (src, orig_src);
2306 /* Optimize the access just a bit. */
2307 if (GET_CODE (src) == MEM
2308 && MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode)
2309 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2310 && bytelen == GET_MODE_SIZE (mode))
2312 tmps[i] = gen_reg_rtx (mode);
2313 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
2315 else if (GET_CODE (src) == CONCAT)
2317 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
2318 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
2320 if ((bytepos == 0 && bytelen == slen0)
2321 || (bytepos != 0 && bytepos + bytelen <= slen))
2323 /* The following assumes that the concatenated objects all
2324 have the same size. In this case, a simple calculation
2325 can be used to determine the object and the bit field
2326 to be extracted. */
2327 tmps[i] = XEXP (src, bytepos / slen0);
2328 if (! CONSTANT_P (tmps[i])
2329 && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
2330 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
2331 (bytepos % slen0) * BITS_PER_UNIT,
2332 1, NULL_RTX, mode, mode, ssize);
2334 else if (bytepos == 0)
2336 rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
2337 emit_move_insn (mem, src);
2338 tmps[i] = adjust_address (mem, mode, 0);
2343 else if (CONSTANT_P (src)
2344 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2347 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2348 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2351 if (BYTES_BIG_ENDIAN && shift)
2352 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2353 tmps[i], 0, OPTAB_WIDEN);
2358 /* Copy the extracted pieces into the proper (probable) hard regs. */
2359 for (i = start; i < XVECLEN (dst, 0); i++)
2360 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
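/* Worked example (illustrative): loading a 12-byte struct (SSIZE == 12)
   into a group of two DImode pieces at byte offsets 0 and 8 clips the
   second piece's BYTELEN from 8 down to 4, and on a big-endian target
   the extracted piece is then shifted left by (8 - 4) * BITS_PER_UNIT
   == 32 bits so the data lands in what would be the low-order memory
   addresses, as the comment above emit_group_load assumes.  */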
2363 /* Emit code to move a block SRC to block DST, where SRC and DST are
2364 non-consecutive groups of registers, each represented by a PARALLEL. */
2367 emit_group_move (dst, src)
2372 if (GET_CODE (src) != PARALLEL
2373 || GET_CODE (dst) != PARALLEL
2374 || XVECLEN (src, 0) != XVECLEN (dst, 0))
2377 /* Skip first entry if NULL. */
2378 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
2379 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
2380 XEXP (XVECEXP (src, 0, i), 0));
2383 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2384 registers represented by a PARALLEL. SSIZE represents the total size of
2385 block DST, or -1 if not known. */
2388 emit_group_store (orig_dst, src, ssize)
2395 if (GET_CODE (src) != PARALLEL)
2398 /* Check for a NULL entry, used to indicate that the parameter goes
2399 both on the stack and in registers. */
2400 if (XEXP (XVECEXP (src, 0, 0), 0))
2405 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2407 /* Copy the (probable) hard regs into pseudos. */
2408 for (i = start; i < XVECLEN (src, 0); i++)
2410 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2411 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2412 emit_move_insn (tmps[i], reg);
2416 /* If we won't be storing directly into memory, protect the real destination
2417 from strange tricks we might play. */
2419 if (GET_CODE (dst) == PARALLEL)
2423 /* We can get a PARALLEL dst if there is a conditional expression in
2424 a return statement. In that case, the dst and src are the same,
2425 so no action is necessary. */
2426 if (rtx_equal_p (dst, src))
2429 /* It is unclear if we can ever reach here, but we may as well handle
2430 it. Allocate a temporary, and split this into a store/load to/from
2431 the temporary. */
2433 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2434 emit_group_store (temp, src, ssize);
2435 emit_group_load (dst, temp, ssize);
2438 else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
2440 dst = gen_reg_rtx (GET_MODE (orig_dst));
2441 /* Make life a bit easier for combine. */
2442 emit_move_insn (dst, const0_rtx);
2445 /* Process the pieces. */
2446 for (i = start; i < XVECLEN (src, 0); i++)
2448 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2449 enum machine_mode mode = GET_MODE (tmps[i]);
2450 unsigned int bytelen = GET_MODE_SIZE (mode);
2453 /* Handle trailing fragments that run over the size of the struct. */
2454 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2456 if (BYTES_BIG_ENDIAN)
2458 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2459 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2460 tmps[i], 0, OPTAB_WIDEN);
2462 bytelen = ssize - bytepos;
2465 if (GET_CODE (dst) == CONCAT)
2467 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2468 dest = XEXP (dst, 0);
2469 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2471 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2472 dest = XEXP (dst, 1);
2478 /* Optimize the access just a bit. */
2479 if (GET_CODE (dest) == MEM
2480 && MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode)
2481 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2482 && bytelen == GET_MODE_SIZE (mode))
2483 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2485 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2486 mode, tmps[i], ssize);
2491 /* Copy from the pseudo into the (probable) hard reg. */
2492 if (GET_CODE (dst) == REG)
2493 emit_move_insn (orig_dst, dst);
2496 /* Generate code to copy a BLKmode object of TYPE out of a
2497 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2498 is null, a stack temporary is created. TGTBLK is returned.
2500 The primary purpose of this routine is to handle functions
2501 that return BLKmode structures in registers. Some machines
2502 (the PA for example) want to return all small structures
2503 in registers regardless of the structure's alignment. */
2506 copy_blkmode_from_reg (tgtblk, srcreg, type)
2511 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2512 rtx src = NULL, dst = NULL;
2513 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2514 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2518 tgtblk = assign_temp (build_qualified_type (type,
2520 | TYPE_QUAL_CONST)),
2522 preserve_temp_slots (tgtblk);
2525 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2526 into a new pseudo which is a full word.
2528 If FUNCTION_ARG_REG_LITTLE_ENDIAN is set and convert_to_mode does a copy,
2529 the wrong part of the register gets copied so we fake a type conversion
2530 in place. */
2531 if (GET_MODE (srcreg) != BLKmode
2532 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2534 if (FUNCTION_ARG_REG_LITTLE_ENDIAN)
2535 srcreg = simplify_gen_subreg (word_mode, srcreg, GET_MODE (srcreg), 0);
2537 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2540 /* Structures whose size is not a multiple of a word are aligned
2541 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2542 machine, this means we must skip the empty high order bytes when
2543 calculating the bit offset. */
2544 if (BYTES_BIG_ENDIAN
2545 && !FUNCTION_ARG_REG_LITTLE_ENDIAN
2546 && bytes % UNITS_PER_WORD)
2547 big_endian_correction
2548 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2550 /* Copy the structure BITSIZE bits at a time.
2552 We could probably emit more efficient code for machines which do not use
2553 strict alignment, but it doesn't seem worth the effort at the current
2554 time. */
2555 for (bitpos = 0, xbitpos = big_endian_correction;
2556 bitpos < bytes * BITS_PER_UNIT;
2557 bitpos += bitsize, xbitpos += bitsize)
2559 /* We need a new source operand each time xbitpos is on a
2560 word boundary and when xbitpos == big_endian_correction
2561 (the first time through). */
2562 if (xbitpos % BITS_PER_WORD == 0
2563 || xbitpos == big_endian_correction)
2564 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2567 /* We need a new destination operand each time bitpos is on
2568 a word boundary. */
2569 if (bitpos % BITS_PER_WORD == 0)
2570 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2572 /* Use xbitpos for the source extraction (right justified) and
2573 bitpos for the destination store (left justified). */
2574 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2575 extract_bit_field (src, bitsize,
2576 xbitpos % BITS_PER_WORD, 1,
2577 NULL_RTX, word_mode, word_mode,
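/* Example with hypothetical numbers: for a 5-byte struct returned in a
   64-bit register on a big-endian machine, bytes % UNITS_PER_WORD == 5,
   so big_endian_correction is 64 - 5 * 8 == 24; extraction then starts
   24 bits into the register (skipping the unused high-order bytes)
   while the stores into TGTBLK start at bit 0.  */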
2585 /* Add a USE expression for REG to the (possibly empty) list pointed
2586 to by CALL_FUSAGE. REG must denote a hard register. */
2589 use_reg (call_fusage, reg)
2590 rtx *call_fusage, reg;
2592 if (GET_CODE (reg) != REG
2593 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2597 = gen_rtx_EXPR_LIST (VOIDmode,
2598 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2601 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2602 starting at REGNO. All of these registers must be hard registers. */
2605 use_regs (call_fusage, regno, nregs)
2612 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2615 for (i = 0; i < nregs; i++)
2616 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2619 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2620 PARALLEL REGS. This is for calls that pass values in multiple
2621 non-contiguous locations. The Irix 6 ABI has examples of this. */
2624 use_group_regs (call_fusage, regs)
2630 for (i = 0; i < XVECLEN (regs, 0); i++)
2632 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2634 /* A NULL entry means the parameter goes both on the stack and in
2635 registers. This can also be a MEM for targets that pass values
2636 partially on the stack and partially in registers. */
2637 if (reg != 0 && GET_CODE (reg) == REG)
2638 use_reg (call_fusage, reg);
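/* Illustrative result: after use_group_regs each element's register has
   been prepended to the list, so *CALL_FUSAGE might look like

       (expr_list (use (reg:DI 4))
          (expr_list (use (reg:DI 3))
             ...))

   ready to be attached to the call as CALL_INSN_FUNCTION_USAGE; the
   register numbers are hypothetical.  */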
2643 /* Determine whether the LEN bytes generated by CONSTFUN can be
2644 stored to memory using several move instructions. CONSTFUNDATA is
2645 a pointer which will be passed as an argument in every CONSTFUN call.
2646 ALIGN is maximum alignment we can assume. Return nonzero if a
2647 call to store_by_pieces should succeed. */
2650 can_store_by_pieces (len, constfun, constfundata, align)
2651 unsigned HOST_WIDE_INT len;
2652 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2656 unsigned HOST_WIDE_INT max_size, l;
2657 HOST_WIDE_INT offset = 0;
2658 enum machine_mode mode, tmode;
2659 enum insn_code icode;
2663 if (! MOVE_BY_PIECES_P (len, align))
2666 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2667 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2668 align = MOVE_MAX * BITS_PER_UNIT;
2670 /* We would first store what we can in the largest integer mode, then go to
2671 successively smaller modes. */
2674 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2679 max_size = STORE_MAX_PIECES + 1;
2680 while (max_size > 1)
2682 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2683 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2684 if (GET_MODE_SIZE (tmode) < max_size)
2687 if (mode == VOIDmode)
2690 icode = mov_optab->handlers[(int) mode].insn_code;
2691 if (icode != CODE_FOR_nothing
2692 && align >= GET_MODE_ALIGNMENT (mode))
2694 unsigned int size = GET_MODE_SIZE (mode);
2701 cst = (*constfun) (constfundata, offset, mode);
2702 if (!LEGITIMATE_CONSTANT_P (cst))
2712 max_size = GET_MODE_SIZE (mode);
2715 /* The code above should have handled everything. */
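/* Worked example (illustrative): with LEN == 11, 64-bit words and no
   alignment obstacles, the passes above try one DImode piece (8 bytes),
   then one HImode piece (2 bytes), then one QImode piece (1 byte), so
   11 == 8 + 2 + 1; every constant CONSTFUN produces along the way must
   satisfy LEGITIMATE_CONSTANT_P or zero is returned.  */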
2723 /* Generate several move instructions to store LEN bytes generated by
2724 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2725 pointer which will be passed as an argument in every CONSTFUN call.
2726 ALIGN is maximum alignment we can assume. */
2729 store_by_pieces (to, len, constfun, constfundata, align)
2731 unsigned HOST_WIDE_INT len;
2732 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2736 struct store_by_pieces data;
2738 if (! MOVE_BY_PIECES_P (len, align))
2740 to = protect_from_queue (to, 1);
2741 data.constfun = constfun;
2742 data.constfundata = constfundata;
2745 store_by_pieces_1 (&data, align);
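/* A minimal sketch of a CONSTFUN callback, modeled on the string
   callbacks in builtins.c; this exact helper is not part of this file.
   DATA is assumed to point at a suitably padded string, and c_readstr
   packs the next GET_MODE_SIZE (MODE) bytes into an rtx constant:

       static rtx
       example_read_str (data, offset, mode)
            PTR data;
            HOST_WIDE_INT offset;
            enum machine_mode mode;
       {
         const char *str = (const char *) data;
         return c_readstr (str + offset, mode);
       }

   A caller would then write, for instance,

       store_by_pieces (to, len, example_read_str, (PTR) str, align);  */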
2748 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2749 rtx with BLKmode). The caller must pass TO through protect_from_queue
2750 before calling. ALIGN is maximum alignment we can assume. */
2753 clear_by_pieces (to, len, align)
2755 unsigned HOST_WIDE_INT len;
2758 struct store_by_pieces data;
2760 data.constfun = clear_by_pieces_1;
2761 data.constfundata = NULL;
2764 store_by_pieces_1 (&data, align);
2767 /* Callback routine for clear_by_pieces.
2768 Return const0_rtx unconditionally. */
2771 clear_by_pieces_1 (data, offset, mode)
2772 PTR data ATTRIBUTE_UNUSED;
2773 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2774 enum machine_mode mode ATTRIBUTE_UNUSED;
2779 /* Subroutine of clear_by_pieces and store_by_pieces.
2780 Generate several move instructions to store LEN bytes of block TO. (A MEM
2781 rtx with BLKmode). The caller must pass TO through protect_from_queue
2782 before calling. ALIGN is maximum alignment we can assume. */
2785 store_by_pieces_1 (data, align)
2786 struct store_by_pieces *data;
2789 rtx to_addr = XEXP (data->to, 0);
2790 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
2791 enum machine_mode mode = VOIDmode, tmode;
2792 enum insn_code icode;
2795 data->to_addr = to_addr;
2797 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2798 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2800 data->explicit_inc_to = 0;
2802 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2804 data->offset = data->len;
2806 /* If storing requires more than two move insns,
2807 copy addresses to registers (to make displacements shorter)
2808 and use post-increment if available. */
2809 if (!data->autinc_to
2810 && move_by_pieces_ninsns (data->len, align) > 2)
2812 /* Determine the main mode we'll be using. */
2813 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2814 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2815 if (GET_MODE_SIZE (tmode) < max_size)
2818 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2820 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2821 data->autinc_to = 1;
2822 data->explicit_inc_to = -1;
2825 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2826 && ! data->autinc_to)
2828 data->to_addr = copy_addr_to_reg (to_addr);
2829 data->autinc_to = 1;
2830 data->explicit_inc_to = 1;
2833 if (!data->autinc_to && CONSTANT_P (to_addr))
2834 data->to_addr = copy_addr_to_reg (to_addr);
2837 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2838 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2839 align = MOVE_MAX * BITS_PER_UNIT;
2841 /* First store what we can in the largest integer mode, then go to
2842 successively smaller modes. */
2844 while (max_size > 1)
2846 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2847 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2848 if (GET_MODE_SIZE (tmode) < max_size)
2851 if (mode == VOIDmode)
2854 icode = mov_optab->handlers[(int) mode].insn_code;
2855 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2856 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2858 max_size = GET_MODE_SIZE (mode);
2861 /* The code above should have handled everything. */
2866 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2867 with move instructions for mode MODE. GENFUN is the gen_... function
2868 to make a move insn for that mode. DATA has all the other info. */
2871 store_by_pieces_2 (genfun, mode, data)
2872 rtx (*genfun) PARAMS ((rtx, ...));
2873 enum machine_mode mode;
2874 struct store_by_pieces *data;
2876 unsigned int size = GET_MODE_SIZE (mode);
2879 while (data->len >= size)
2882 data->offset -= size;
2884 if (data->autinc_to)
2885 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2888 to1 = adjust_address (data->to, mode, data->offset);
2890 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2891 emit_insn (gen_add2_insn (data->to_addr,
2892 GEN_INT (-(HOST_WIDE_INT) size)));
2894 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2895 emit_insn ((*genfun) (to1, cst));
2897 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2898 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2900 if (! data->reverse)
2901 data->offset += size;
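/* Illustrative output: when TO's address is already an auto-increment
   (say POST_INC), each SImode iteration stores through

       (set (mem:SI (post_inc:SI (reg addr))) (const_int ...))

   whereas with a nonzero EXPLICIT_INC_TO an ordinary store is followed
   by an explicit  addr += size  (or preceded by  addr -= size  when
   storing in reverse).  */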
2907 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2908 its length in bytes. */
2911 clear_storage (object, size)
2916 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2917 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2919 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2920 just move a zero. Otherwise, do this a piece at a time. */
2921 if (GET_MODE (object) != BLKmode
2922 && GET_CODE (size) == CONST_INT
2923 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
2924 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2927 object = protect_from_queue (object, 1);
2928 size = protect_from_queue (size, 0);
2930 if (GET_CODE (size) == CONST_INT
2931 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2932 clear_by_pieces (object, INTVAL (size), align);
2933 else if (clear_storage_via_clrstr (object, size, align))
2936 retval = clear_storage_via_libcall (object, size);
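/* Typical use (illustrative): zeroing a 64-byte BLKmode temporary,

       clear_storage (assign_stack_temp (BLKmode, 64, 0), GEN_INT (64));

   which is expanded by pieces, via a clrstr pattern, or by a libcall
   to memset/bzero, whichever strategy above applies first.  */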
2942 /* A subroutine of clear_storage. Expand a clrstr pattern;
2943 return true if successful. */
2946 clear_storage_via_clrstr (object, size, align)
2950 /* Try the most limited insn first, because there's no point
2951 including more than one in the machine description unless
2952 the more limited one has some advantage. */
2954 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2955 enum machine_mode mode;
2957 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2958 mode = GET_MODE_WIDER_MODE (mode))
2960 enum insn_code code = clrstr_optab[(int) mode];
2961 insn_operand_predicate_fn pred;
2963 if (code != CODE_FOR_nothing
2964 /* We don't need MODE to be narrower than
2965 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2966 the mode mask, as it is returned by the macro, it will
2967 definitely be less than the actual mode mask. */
2968 && ((GET_CODE (size) == CONST_INT
2969 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2970 <= (GET_MODE_MASK (mode) >> 1)))
2971 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2972 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2973 || (*pred) (object, BLKmode))
2974 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2975 || (*pred) (opalign, VOIDmode)))
2978 rtx last = get_last_insn ();
2981 op1 = convert_to_mode (mode, size, 1);
2982 pred = insn_data[(int) code].operand[1].predicate;
2983 if (pred != 0 && ! (*pred) (op1, mode))
2984 op1 = copy_to_mode_reg (mode, op1);
2986 pat = GEN_FCN ((int) code) (object, op1, opalign);
2993 delete_insns_since (last);
3000 /* A subroutine of clear_storage. Expand a call to memset or bzero.
3001 Return the return value of memset, 0 otherwise. */
3004 clear_storage_via_libcall (object, size)
3007 tree call_expr, arg_list, fn, object_tree, size_tree;
3008 enum machine_mode size_mode;
3011 /* OBJECT or SIZE may have been passed through protect_from_queue.
3013 It is unsafe to save the value generated by protect_from_queue
3014 and reuse it later. Consider what happens if emit_queue is
3015 called before the return value from protect_from_queue is used.
3017 Expansion of the CALL_EXPR below will call emit_queue before
3018 we are finished emitting RTL for argument setup. So if we are
3019 not careful we could get the wrong value for an argument.
3021 To avoid this problem we go ahead and emit code to copy OBJECT
3022 and SIZE into new pseudos. We can then place those new pseudos
3023 into an RTL_EXPR and use them later, even after a call to
3024 emit_queue.
3026 Note this is not strictly needed for library calls since they
3027 do not call emit_queue before loading their arguments. However,
3028 we may need to have library calls call emit_queue in the future
3029 since failing to do so could cause problems for targets which
3030 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
3032 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
3034 if (TARGET_MEM_FUNCTIONS)
3035 size_mode = TYPE_MODE (sizetype);
3037 size_mode = TYPE_MODE (unsigned_type_node);
3038 size = convert_to_mode (size_mode, size, 1);
3039 size = copy_to_mode_reg (size_mode, size);
3041 /* It is incorrect to use the libcall calling conventions to call
3042 memset in this context. This could be a user call to memset and
3043 the user may wish to examine the return value from memset. For
3044 targets where libcalls and normal calls have different conventions
3045 for returning pointers, we could end up generating incorrect code.
3047 For convenience, we generate the call to bzero this way as well. */
3049 object_tree = make_tree (ptr_type_node, object);
3050 if (TARGET_MEM_FUNCTIONS)
3051 size_tree = make_tree (sizetype, size);
3053 size_tree = make_tree (unsigned_type_node, size);
3055 fn = clear_storage_libcall_fn (true);
3056 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
3057 if (TARGET_MEM_FUNCTIONS)
3058 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
3059 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
3061 /* Now we have to build up the CALL_EXPR itself. */
3062 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
3063 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
3064 call_expr, arg_list, NULL_TREE);
3065 TREE_SIDE_EFFECTS (call_expr) = 1;
3067 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
3069 /* If we are initializing a readonly value, show the above call
3070 clobbered it. Otherwise, a load from it may erroneously be
3071 hoisted from a loop. */
3072 if (RTX_UNCHANGING_P (object))
3073 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
3075 return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
3078 /* A subroutine of clear_storage_via_libcall. Create the tree node
3079 for the function we use for block clears. The first time FOR_CALL
3080 is true, we call assemble_external. */
3082 static GTY(()) tree block_clear_fn;
3085 clear_storage_libcall_fn (for_call)
3088 static bool emitted_extern;
3089 tree fn = block_clear_fn, args;
3093 if (TARGET_MEM_FUNCTIONS)
3095 fn = get_identifier ("memset");
3096 args = build_function_type_list (ptr_type_node, ptr_type_node,
3097 integer_type_node, sizetype,
3102 fn = get_identifier ("bzero");
3103 args = build_function_type_list (void_type_node, ptr_type_node,
3104 unsigned_type_node, NULL_TREE);
3107 fn = build_decl (FUNCTION_DECL, fn, args);
3108 DECL_EXTERNAL (fn) = 1;
3109 TREE_PUBLIC (fn) = 1;
3110 DECL_ARTIFICIAL (fn) = 1;
3111 TREE_NOTHROW (fn) = 1;
3113 block_clear_fn = fn;
3116 if (for_call && !emitted_extern)
3118 emitted_extern = true;
3119 make_decl_rtl (fn, NULL);
3120 assemble_external (fn);
3126 /* Generate code to copy Y into X.
3127 Both Y and X must have the same mode, except that
3128 Y can be a constant with VOIDmode.
3129 This mode cannot be BLKmode; use emit_block_move for that.
3131 Return the last instruction emitted. */
3134 emit_move_insn (x, y)
3137 enum machine_mode mode = GET_MODE (x);
3138 rtx y_cst = NULL_RTX;
3141 x = protect_from_queue (x, 1);
3142 y = protect_from_queue (y, 0);
3144 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
3147 /* Never force constant_p_rtx to memory. */
3148 if (GET_CODE (y) == CONSTANT_P_RTX)
3150 else if (CONSTANT_P (y))
3153 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3154 && (last_insn = compress_float_constant (x, y)))
3157 if (!LEGITIMATE_CONSTANT_P (y))
3160 y = force_const_mem (mode, y);
3164 /* If X or Y are memory references, verify that their addresses are valid
3165 for the machine. */
3166 if (GET_CODE (x) == MEM
3167 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3168 && ! push_operand (x, GET_MODE (x)))
3170 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
3171 x = validize_mem (x);
3173 if (GET_CODE (y) == MEM
3174 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3176 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
3177 y = validize_mem (y);
3179 if (mode == BLKmode)
3182 last_insn = emit_move_insn_1 (x, y);
3184 if (y_cst && GET_CODE (x) == REG)
3185 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
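/* Usage example (illustrative): copying a constant into a fresh pseudo,

       rtx reg = gen_reg_rtx (SImode);
       emit_move_insn (reg, GEN_INT (42));

   the CONST_INT has VOIDmode, which is explicitly allowed; a constant
   that failed LEGITIMATE_CONSTANT_P would instead be forced into the
   constant pool by the code above.  */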
3190 /* Low level part of emit_move_insn.
3191 Called just like emit_move_insn, but assumes X and Y
3192 are basically valid. */
3195 emit_move_insn_1 (x, y)
3198 enum machine_mode mode = GET_MODE (x);
3199 enum machine_mode submode;
3200 enum mode_class class = GET_MODE_CLASS (mode);
3202 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
3205 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
3207 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
3209 /* Expand complex moves by moving real part and imag part, if possible. */
3210 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
3211 && BLKmode != (submode = GET_MODE_INNER (mode))
3212 && (mov_optab->handlers[(int) submode].insn_code
3213 != CODE_FOR_nothing))
3215 /* Don't split destination if it is a stack push. */
3216 int stack = push_operand (x, GET_MODE (x));
3218 #ifdef PUSH_ROUNDING
3219 /* In case we output to the stack, but the size is smaller than the
3220 machine can push exactly, we need to use move instructions. */
3222 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
3223 != GET_MODE_SIZE (submode)))
3226 HOST_WIDE_INT offset1, offset2;
3228 /* Do not use anti_adjust_stack, since we don't want to update
3229 stack_pointer_delta. */
3230 temp = expand_binop (Pmode,
3231 #ifdef STACK_GROWS_DOWNWARD
3239 (GET_MODE_SIZE (GET_MODE (x)))),
3240 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3242 if (temp != stack_pointer_rtx)
3243 emit_move_insn (stack_pointer_rtx, temp);
3245 #ifdef STACK_GROWS_DOWNWARD
3247 offset2 = GET_MODE_SIZE (submode);
3249 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
3250 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
3251 + GET_MODE_SIZE (submode));
3254 emit_move_insn (change_address (x, submode,
3255 gen_rtx_PLUS (Pmode,
3257 GEN_INT (offset1))),
3258 gen_realpart (submode, y));
3259 emit_move_insn (change_address (x, submode,
3260 gen_rtx_PLUS (Pmode,
3262 GEN_INT (offset2))),
3263 gen_imagpart (submode, y));
3267 /* If this is a stack, push the highpart first, so it
3268 will be in the argument order.
3270 In that case, change_address is used only to convert
3271 the mode, not to change the address. */
3274 /* Note that the real part always precedes the imag part in memory
3275 regardless of the machine's endianness. */
3276 #ifdef STACK_GROWS_DOWNWARD
3277 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3278 (gen_rtx_MEM (submode, XEXP (x, 0)),
3279 gen_imagpart (submode, y)));
3280 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3281 (gen_rtx_MEM (submode, XEXP (x, 0)),
3282 gen_realpart (submode, y)));
3284 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3285 (gen_rtx_MEM (submode, XEXP (x, 0)),
3286 gen_realpart (submode, y)));
3287 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3288 (gen_rtx_MEM (submode, XEXP (x, 0)),
3289 gen_imagpart (submode, y)));
3294 rtx realpart_x, realpart_y;
3295 rtx imagpart_x, imagpart_y;
3297 /* If this is a complex value with each part being smaller than a
3298 word, the usual calling sequence will likely pack the pieces into
3299 a single register. Unfortunately, SUBREG of hard registers only
3300 deals in terms of words, so we have a problem converting input
3301 arguments to the CONCAT of two registers that is used elsewhere
3302 for complex values. If this is before reload, we can copy it into
3303 memory and reload. FIXME, we should see about using extract and
3304 insert on integer registers, but complex short and complex char
3305 variables should be rarely used. */
3306 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
3307 && (reload_in_progress | reload_completed) == 0)
3310 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
3312 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
3314 if (packed_dest_p || packed_src_p)
3316 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
3317 ? MODE_FLOAT : MODE_INT);
3319 enum machine_mode reg_mode
3320 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
3322 if (reg_mode != BLKmode)
3324 rtx mem = assign_stack_temp (reg_mode,
3325 GET_MODE_SIZE (mode), 0);
3326 rtx cmem = adjust_address (mem, mode, 0);
3329 = N_("function using short complex types cannot be inline");
3333 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
3335 emit_move_insn_1 (cmem, y);
3336 return emit_move_insn_1 (sreg, mem);
3340 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
3342 emit_move_insn_1 (mem, sreg);
3343 return emit_move_insn_1 (x, cmem);
3349 realpart_x = gen_realpart (submode, x);
3350 realpart_y = gen_realpart (submode, y);
3351 imagpart_x = gen_imagpart (submode, x);
3352 imagpart_y = gen_imagpart (submode, y);
3354 /* Show the output dies here. This is necessary for SUBREGs
3355 of pseudos since we cannot track their lifetimes correctly;
3356 hard regs shouldn't appear here except as return values.
3357 We never want to emit such a clobber after reload. */
3359 && ! (reload_in_progress || reload_completed)
3360 && (GET_CODE (realpart_x) == SUBREG
3361 || GET_CODE (imagpart_x) == SUBREG))
3362 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3364 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3365 (realpart_x, realpart_y));
3366 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3367 (imagpart_x, imagpart_y));
3370 return get_last_insn ();
3373 /* This will handle any multi-word or full-word mode that lacks a move_insn
3374 pattern. However, you will get better code if you define such patterns,
3375 even if they must turn into multiple assembler instructions. */
3376 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
3383 #ifdef PUSH_ROUNDING
3385 /* If X is a push on the stack, do the push now and replace
3386 X with a reference to the stack pointer. */
3387 if (push_operand (x, GET_MODE (x)))
3392 /* Do not use anti_adjust_stack, since we don't want to update
3393 stack_pointer_delta. */
3394 temp = expand_binop (Pmode,
3395 #ifdef STACK_GROWS_DOWNWARD
3403 (GET_MODE_SIZE (GET_MODE (x)))),
3404 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3406 if (temp != stack_pointer_rtx)
3407 emit_move_insn (stack_pointer_rtx, temp);
3409 code = GET_CODE (XEXP (x, 0));
3411 /* Just hope that small offsets off SP are OK. */
3412 if (code == POST_INC)
3413 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3414 GEN_INT (-((HOST_WIDE_INT)
3415 GET_MODE_SIZE (GET_MODE (x)))));
3416 else if (code == POST_DEC)
3417 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3418 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3420 temp = stack_pointer_rtx;
3422 x = change_address (x, VOIDmode, temp);
3426 /* If we are in reload, see if either operand is a MEM whose address
3427 is scheduled for replacement. */
3428 if (reload_in_progress && GET_CODE (x) == MEM
3429 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3430 x = replace_equiv_address_nv (x, inner);
3431 if (reload_in_progress && GET_CODE (y) == MEM
3432 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3433 y = replace_equiv_address_nv (y, inner);
3439 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3442 rtx xpart = operand_subword (x, i, 1, mode);
3443 rtx ypart = operand_subword (y, i, 1, mode);
3445 /* If we can't get a part of Y, put Y into memory if it is a
3446 constant. Otherwise, force it into a register. If we still
3447 can't get a part of Y, abort. */
3448 if (ypart == 0 && CONSTANT_P (y))
3450 y = force_const_mem (mode, y);
3451 ypart = operand_subword (y, i, 1, mode);
3453 else if (ypart == 0)
3454 ypart = operand_subword_force (y, i, mode);
3456 if (xpart == 0 || ypart == 0)
3459 need_clobber |= (GET_CODE (xpart) == SUBREG);
3461 last_insn = emit_move_insn (xpart, ypart);
3467 /* Show the output dies here. This is necessary for SUBREGs
3468 of pseudos since we cannot track their lifetimes correctly;
3469 hard regs shouldn't appear here except as return values.
3470 We never want to emit such a clobber after reload. */
3472 && ! (reload_in_progress || reload_completed)
3473 && need_clobber != 0)
3474 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3484 /* If Y is representable exactly in a narrower mode, and the target can
3485 perform the extension directly from constant or memory, then emit the
3486 move as an extension. */
3489 compress_float_constant (x, y)
3492 enum machine_mode dstmode = GET_MODE (x);
3493 enum machine_mode orig_srcmode = GET_MODE (y);
3494 enum machine_mode srcmode;
3497 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3499 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3500 srcmode != orig_srcmode;
3501 srcmode = GET_MODE_WIDER_MODE (srcmode))
3504 rtx trunc_y, last_insn;
3506 /* Skip if the target can't extend this way. */
3507 ic = can_extend_p (dstmode, srcmode, 0);
3508 if (ic == CODE_FOR_nothing)
3511 /* Skip if the narrowed value isn't exact. */
3512 if (! exact_real_truncate (srcmode, &r))
3515 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3517 if (LEGITIMATE_CONSTANT_P (trunc_y))
3519 /* Skip if the target needs extra instructions to perform
3520 the extension. */
3521 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3524 else if (float_extend_from_mem[dstmode][srcmode])
3525 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3529 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3530 last_insn = get_last_insn ();
3532 if (GET_CODE (x) == REG)
3533 REG_NOTES (last_insn)
3534 = gen_rtx_EXPR_LIST (REG_EQUAL, y, REG_NOTES (last_insn));
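/* Concrete effect (illustrative): moving the DFmode constant 1.0, which
   is exactly representable in SFmode, can become

       (set (reg:DF d) (float_extend:DF (mem:SF (symbol_ref ...))))

   on a target that extends directly from memory, halving the constant
   pool entry.  */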
3542 /* Pushing data onto the stack. */
3544 /* Push a block of length SIZE (perhaps variable)
3545 and return an rtx to address the beginning of the block.
3546 Note that it is not possible for the value returned to be a QUEUED.
3547 The value may be virtual_outgoing_args_rtx.
3549 EXTRA is the number of bytes of padding to push in addition to SIZE.
3550 BELOW nonzero means this padding comes at low addresses;
3551 otherwise, the padding comes at high addresses. */
3554 push_block (size, extra, below)
3560 size = convert_modes (Pmode, ptr_mode, size, 1);
3561 if (CONSTANT_P (size))
3562 anti_adjust_stack (plus_constant (size, extra));
3563 else if (GET_CODE (size) == REG && extra == 0)
3564 anti_adjust_stack (size);
3567 temp = copy_to_mode_reg (Pmode, size);
3569 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3570 temp, 0, OPTAB_LIB_WIDEN);
3571 anti_adjust_stack (temp);
3574 #ifndef STACK_GROWS_DOWNWARD
3580 temp = virtual_outgoing_args_rtx;
3581 if (extra != 0 && below)
3582 temp = plus_constant (temp, extra);
3586 if (GET_CODE (size) == CONST_INT)
3587 temp = plus_constant (virtual_outgoing_args_rtx,
3588 -INTVAL (size) - (below ? 0 : extra));
3589 else if (extra != 0 && !below)
3590 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3591 negate_rtx (Pmode, plus_constant (size, extra)));
3593 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3594 negate_rtx (Pmode, size));
3597 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3600 #ifdef PUSH_ROUNDING
3602 /* Emit single push insn. */
3605 emit_single_push_insn (mode, x, type)
3607 enum machine_mode mode;
3611 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3613 enum insn_code icode;
3614 insn_operand_predicate_fn pred;
3616 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3617 /* If there is a push pattern, use it. Otherwise try the old way of
3618 throwing a MEM representing the push operation to the move expander. */
3619 icode = push_optab->handlers[(int) mode].insn_code;
3620 if (icode != CODE_FOR_nothing)
3622 if (((pred = insn_data[(int) icode].operand[0].predicate)
3623 && !((*pred) (x, mode))))
3624 x = force_reg (mode, x);
3625 emit_insn (GEN_FCN (icode) (x));
3628 if (GET_MODE_SIZE (mode) == rounded_size)
3629 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3632 #ifdef STACK_GROWS_DOWNWARD
3633 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3634 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3636 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3637 GEN_INT (rounded_size));
3639 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3642 dest = gen_rtx_MEM (mode, dest_addr);
3646 set_mem_attributes (dest, type, 1);
3648 if (flag_optimize_sibling_calls)
3649 /* Function incoming arguments may overlap with sibling call
3650 outgoing arguments and we cannot allow reordering of reads
3651 from function arguments with stores to outgoing arguments
3652 of sibling calls. */
3653 set_mem_alias_set (dest, 0);
3655 emit_move_insn (dest, x);
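/* On a STACK_GROWS_DOWNWARD target with no push pattern and no rounding
   slop, a word-sized push emits (illustrative)

       (set (mem:SI (pre_dec:SI (reg sp))) (reg x))

   with the address built from STACK_PUSH_CODE.  */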
3659 /* Generate code to push X onto the stack, assuming it has mode MODE and
3660 type TYPE.
3661 MODE is redundant except when X is a CONST_INT (since they don't
3662 carry mode info).
3663 SIZE is an rtx for the size of data to be copied (in bytes),
3664 needed only if X is BLKmode.
3666 ALIGN (in bits) is maximum alignment we can assume.
3668 If PARTIAL and REG are both nonzero, then copy that many of the first
3669 words of X into registers starting with REG, and push the rest of X.
3670 The amount of space pushed is decreased by PARTIAL words,
3671 rounded *down* to a multiple of PARM_BOUNDARY.
3672 REG must be a hard register in this case.
3673 If REG is zero but PARTIAL is not, take all other actions for an
3674 argument partially in registers, but do not actually load any
3675 registers.
3677 EXTRA is the amount in bytes of extra space to leave next to this arg.
3678 This is ignored if an argument block has already been allocated.
3680 On a machine that lacks real push insns, ARGS_ADDR is the address of
3681 the bottom of the argument block for this call. We use indexing off there
3682 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3683 argument block has not been preallocated.
3685 ARGS_SO_FAR is the size of args previously pushed for this call.
3687 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3688 for arguments passed in registers. If nonzero, it will be the number
3689 of bytes required. */
3692 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3693 args_addr, args_so_far, reg_parm_stack_space,
3696 enum machine_mode mode;
3705 int reg_parm_stack_space;
3709 enum direction stack_direction
3710 #ifdef STACK_GROWS_DOWNWARD
3716 /* Decide where to pad the argument: `downward' for below,
3717 `upward' for above, or `none' for don't pad it.
3718 Default is below for small data on big-endian machines; else above. */
3719 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3721 /* Invert direction if stack is post-decrement.
3723 if (STACK_PUSH_CODE == POST_DEC)
3724 if (where_pad != none)
3725 where_pad = (where_pad == downward ? upward : downward);
3727 xinner = x = protect_from_queue (x, 0);
3729 if (mode == BLKmode)
3731 /* Copy a block into the stack, entirely or partially. */
3734 int used = partial * UNITS_PER_WORD;
3735 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3743 /* USED is now the # of bytes we need not copy to the stack
3744 because registers will take care of them. */
3747 xinner = adjust_address (xinner, BLKmode, used);
3749 /* If the partial register-part of the arg counts in its stack size,
3750 skip the part of stack space corresponding to the registers.
3751 Otherwise, start copying to the beginning of the stack space,
3752 by setting SKIP to 0. */
3753 skip = (reg_parm_stack_space == 0) ? 0 : used;
3755 #ifdef PUSH_ROUNDING
3756 /* Do it with several push insns if that doesn't take lots of insns
3757 and if there is no difficulty with push insns that skip bytes
3758 on the stack for alignment purposes. */
3761 && GET_CODE (size) == CONST_INT
3763 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3764 /* Here we avoid the case of a structure whose weak alignment
3765 forces many pushes of a small amount of data,
3766 and such small pushes do rounding that causes trouble. */
3767 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3768 || align >= BIGGEST_ALIGNMENT
3769 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3770 == (align / BITS_PER_UNIT)))
3771 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3773 /* Push padding now if padding above and stack grows down,
3774 or if padding below and stack grows up.
3775 But if space already allocated, this has already been done. */
3776 if (extra && args_addr == 0
3777 && where_pad != none && where_pad != stack_direction)
3778 anti_adjust_stack (GEN_INT (extra));
3780 move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
3783 #endif /* PUSH_ROUNDING */
3787 /* Otherwise make space on the stack and copy the data
3788 to the address of that space. */
3790 /* Deduct words put into registers from the size we must copy. */
3793 if (GET_CODE (size) == CONST_INT)
3794 size = GEN_INT (INTVAL (size) - used);
3796 size = expand_binop (GET_MODE (size), sub_optab, size,
3797 GEN_INT (used), NULL_RTX, 0,
3801 /* Get the address of the stack space.
3802 In this case, we do not deal with EXTRA separately.
3803 A single stack adjust will do. */
3806 temp = push_block (size, extra, where_pad == downward);
3809 else if (GET_CODE (args_so_far) == CONST_INT)
3810 temp = memory_address (BLKmode,
3811 plus_constant (args_addr,
3812 skip + INTVAL (args_so_far)));
3814 temp = memory_address (BLKmode,
3815 plus_constant (gen_rtx_PLUS (Pmode,
3820 if (!ACCUMULATE_OUTGOING_ARGS)
3822 /* If the source is referenced relative to the stack pointer,
3823 copy it to another register to stabilize it. We do not need
3824 to do this if we know that we won't be changing sp. */
3826 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3827 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3828 temp = copy_to_reg (temp);
3831 target = gen_rtx_MEM (BLKmode, temp);
3835 set_mem_attributes (target, type, 1);
3836 /* Function incoming arguments may overlap with sibling call
3837 outgoing arguments and we cannot allow reordering of reads
3838 from function arguments with stores to outgoing arguments
3839 of sibling calls. */
3840 set_mem_alias_set (target, 0);
3843 /* ALIGN may well be better aligned than TYPE, e.g. due to
3844 PARM_BOUNDARY. Assume the caller isn't lying. */
3845 set_mem_align (target, align);
3847 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3850 else if (partial > 0)
3852 /* Scalar partly in registers. */
3854 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3857 /* # words of start of argument
3858 that we must make space for but need not store. */
3859 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3860 int args_offset = INTVAL (args_so_far);
3863 /* Push padding now if padding above and stack grows down,
3864 or if padding below and stack grows up.
3865 But if space already allocated, this has already been done. */
3866 if (extra && args_addr == 0
3867 && where_pad != none && where_pad != stack_direction)
3868 anti_adjust_stack (GEN_INT (extra));
3870 /* If we make space by pushing it, we might as well push
3871 the real data. Otherwise, we can leave OFFSET nonzero
3872 and leave the space uninitialized. */
3876 /* Now NOT_STACK gets the number of words that we don't need to
3877 allocate on the stack. */
3878 not_stack = partial - offset;
3880 /* If the partial register-part of the arg counts in its stack size,
3881 skip the part of stack space corresponding to the registers.
3882 Otherwise, start copying to the beginning of the stack space,
3883 by setting SKIP to 0. */
3884 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3886 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3887 x = validize_mem (force_const_mem (mode, x));
3889 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3890 SUBREGs of such registers are not allowed. */
3891 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3892 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3893 x = copy_to_reg (x);
3895 /* Loop over all the words allocated on the stack for this arg. */
3896 /* We can do it by words, because any scalar bigger than a word
3897 has a size a multiple of a word. */
3898 #ifndef PUSH_ARGS_REVERSED
3899 for (i = not_stack; i < size; i++)
3901 for (i = size - 1; i >= not_stack; i--)
3903 if (i >= not_stack + offset)
3904 emit_push_insn (operand_subword_force (x, i, mode),
3905 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3907 GEN_INT (args_offset + ((i - not_stack + skip)
3909 reg_parm_stack_space, alignment_pad);
3914 rtx target = NULL_RTX;
3917 /* Push padding now if padding above and stack grows down,
3918 or if padding below and stack grows up.
3919 But if space already allocated, this has already been done. */
3920 if (extra && args_addr == 0
3921 && where_pad != none && where_pad != stack_direction)
3922 anti_adjust_stack (GEN_INT (extra));
3924 #ifdef PUSH_ROUNDING
3925 if (args_addr == 0 && PUSH_ARGS)
3926 emit_single_push_insn (mode, x, type);
3930 if (GET_CODE (args_so_far) == CONST_INT)
3932 = memory_address (mode,
3933 plus_constant (args_addr,
3934 INTVAL (args_so_far)));
3936 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3939 dest = gen_rtx_MEM (mode, addr);
3942 set_mem_attributes (dest, type, 1);
3943 /* Function incoming arguments may overlap with sibling call
3944 outgoing arguments and we cannot allow reordering of reads
3945 from function arguments with stores to outgoing arguments
3946 of sibling calls. */
3947 set_mem_alias_set (dest, 0);
3950 emit_move_insn (dest, x);
3954 /* If part should go in registers, copy that part
3955 into the appropriate registers. Do this now, at the end,
3956 since mem-to-mem copies above may do function calls. */
3957 if (partial > 0 && reg != 0)
3959 /* Handle calls that pass values in multiple non-contiguous locations.
3960 The Irix 6 ABI has examples of this. */
3961 if (GET_CODE (reg) == PARALLEL)
3962 emit_group_load (reg, x, -1); /* ??? size? */
3964 move_block_to_reg (REGNO (reg), x, partial, mode);
3967 if (extra && args_addr == 0 && where_pad == stack_direction)
3968 anti_adjust_stack (GEN_INT (extra));
3970 if (alignment_pad && args_addr == 0)
3971 anti_adjust_stack (alignment_pad);
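/* Worked example (illustrative): pushing an 8-word BLKmode argument
   with PARTIAL == 2 and REG a hard register stores words 2..7 on the
   stack and then copies the first two words into registers with
   move_block_to_reg; the register loads are deliberately done last
   because the block copy above may itself emit function calls.  */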
3974 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3975 operations. */
3982 /* Only registers can be subtargets. */
3983 || GET_CODE (x) != REG
3984 /* If the register is readonly, it can't be set more than once. */
3985 || RTX_UNCHANGING_P (x)
3986 /* Don't use hard regs to avoid extending their life. */
3987 || REGNO (x) < FIRST_PSEUDO_REGISTER
3988 /* Avoid subtargets inside loops,
3989 since they hide some invariant expressions. */
3990 || preserve_subexpressions_p ())
3994 /* Expand an assignment that stores the value of FROM into TO.
3995 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3996 (This may contain a QUEUED rtx;
3997 if the value is constant, this rtx is a constant.)
3998 Otherwise, the returned value is NULL_RTX.
4000 SUGGEST_REG is no longer actually used.
4001 It used to mean, copy the value through a register
4002 and return that register, if that is possible.
4003 We now use WANT_VALUE to decide whether to do this. */
4006 expand_assignment (to, from, want_value, suggest_reg)
4009 int suggest_reg ATTRIBUTE_UNUSED;
4014 /* Don't crash if the lhs of the assignment was erroneous. */
4016 if (TREE_CODE (to) == ERROR_MARK)
4018 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
4019 return want_value ? result : NULL_RTX;
4022 /* Assignment of a structure component needs special treatment
4023 if the structure component's rtx is not simply a MEM.
4024 Assignment of an array element at a constant index, and assignment of
4025 an array element in an unaligned packed structure field, have the same
4026 problem. */
4028 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
4029 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF)
4031 enum machine_mode mode1;
4032 HOST_WIDE_INT bitsize, bitpos;
4040 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4041 &unsignedp, &volatilep);
4043 /* If we are going to use store_bit_field and extract_bit_field,
4044 make sure to_rtx will be safe for multiple use. */
4046 if (mode1 == VOIDmode && want_value)
4047 tem = stabilize_reference (tem);
4049 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
4053 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4055 if (GET_CODE (to_rtx) != MEM)
4058 #ifdef POINTERS_EXTEND_UNSIGNED
4059 if (GET_MODE (offset_rtx) != Pmode)
4060 offset_rtx = convert_memory_address (Pmode, offset_rtx);
4062 if (GET_MODE (offset_rtx) != ptr_mode)
4063 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4066 /* A constant address in TO_RTX can have VOIDmode; we must not try
4067 to call force_reg in that case. Avoid that case. */
4068 if (GET_CODE (to_rtx) == MEM
4069 && GET_MODE (to_rtx) == BLKmode
4070 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4072 && (bitpos % bitsize) == 0
4073 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4074 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4076 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4080 to_rtx = offset_address (to_rtx, offset_rtx,
4081 highest_pow2_factor_for_type (TREE_TYPE (to),
4085 if (GET_CODE (to_rtx) == MEM)
4087 /* If the field is at offset zero, we could have been given the
4088 DECL_RTX of the parent struct. Don't munge it. */
4089 to_rtx = shallow_copy_rtx (to_rtx);
4091 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4094 /* Deal with volatile and readonly fields. The former is only done
4095 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4096 if (volatilep && GET_CODE (to_rtx) == MEM)
4098 if (to_rtx == orig_to_rtx)
4099 to_rtx = copy_rtx (to_rtx);
4100 MEM_VOLATILE_P (to_rtx) = 1;
4103 if (TREE_CODE (to) == COMPONENT_REF
4104 && TREE_READONLY (TREE_OPERAND (to, 1)))
4106 if (to_rtx == orig_to_rtx)
4107 to_rtx = copy_rtx (to_rtx);
4108 RTX_UNCHANGING_P (to_rtx) = 1;
4111 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
4113 if (to_rtx == orig_to_rtx)
4114 to_rtx = copy_rtx (to_rtx);
4115 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4118 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4120 /* Spurious cast for HPUX compiler. */
4121 ? ((enum machine_mode)
4122 TYPE_MODE (TREE_TYPE (to)))
4124 unsignedp, TREE_TYPE (tem), get_alias_set (to));
4126 preserve_temp_slots (result);
4130 /* If the value is meaningful, convert RESULT to the proper mode.
4131 Otherwise, return nothing. */
4132 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
4133 TYPE_MODE (TREE_TYPE (from)),
4135 TREE_UNSIGNED (TREE_TYPE (to)))
4139 /* If the rhs is a function call and its value is not an aggregate,
4140 call the function before we start to compute the lhs.
4141 This is needed for correct code for cases such as
4142 val = setjmp (buf) on machines where reference to val
4143 requires loading up part of an address in a separate insn.
4145 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4146 since it might be a promoted variable where the zero- or sign- extension
4147 needs to be done. Handling this in the normal way is safe because no
4148 computation is done before the call. */
4149 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
4150 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4151 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4152 && GET_CODE (DECL_RTL (to)) == REG))
4157 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
4159 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4161 /* Handle calls that return values in multiple non-contiguous locations.
4162 The Irix 6 ABI has examples of this. */
4163 if (GET_CODE (to_rtx) == PARALLEL)
4164 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)));
4165 else if (GET_MODE (to_rtx) == BLKmode)
4166 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4169 #ifdef POINTERS_EXTEND_UNSIGNED
4170 if (POINTER_TYPE_P (TREE_TYPE (to))
4171 && GET_MODE (to_rtx) != GET_MODE (value))
4172 value = convert_memory_address (GET_MODE (to_rtx), value);
4174 emit_move_insn (to_rtx, value);
4176 preserve_temp_slots (to_rtx);
4179 return want_value ? to_rtx : NULL_RTX;
4182 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4183 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4186 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4188 /* Don't move directly into a return register. */
4189 if (TREE_CODE (to) == RESULT_DECL
4190 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
4195 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
4197 if (GET_CODE (to_rtx) == PARALLEL)
4198 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)));
4200 emit_move_insn (to_rtx, temp);
4202 preserve_temp_slots (to_rtx);
4205 return want_value ? to_rtx : NULL_RTX;
4208 /* In case we are returning the contents of an object which overlaps
4209 the place the value is being stored, use a safe function when copying
4210 a value through a pointer into a structure value return block. */
4211 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4212 && current_function_returns_struct
4213 && !current_function_returns_pcc_struct)
4218 size = expr_size (from);
4219 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
4221 if (TARGET_MEM_FUNCTIONS)
4222 emit_library_call (memmove_libfunc, LCT_NORMAL,
4223 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4224 XEXP (from_rtx, 0), Pmode,
4225 convert_to_mode (TYPE_MODE (sizetype),
4226 size, TREE_UNSIGNED (sizetype)),
4227 TYPE_MODE (sizetype));
4229 emit_library_call (bcopy_libfunc, LCT_NORMAL,
4230 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
4231 XEXP (to_rtx, 0), Pmode,
4232 convert_to_mode (TYPE_MODE (integer_type_node),
4234 TREE_UNSIGNED (integer_type_node)),
4235 TYPE_MODE (integer_type_node));
4237 preserve_temp_slots (to_rtx);
4240 return want_value ? to_rtx : NULL_RTX;
4243 /* Compute FROM and store the value in the rtx we got. */
4246 result = store_expr (from, to_rtx, want_value);
4247 preserve_temp_slots (result);
4250 return want_value ? result : NULL_RTX;
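/* Usage example (illustrative): for a source assignment  s.f = x  a
   front end calls

       expand_assignment (lhs, rhs, 0, 0);

   and discards the value, while a use such as  y = (s.f = x)  passes
   WANT_VALUE == 1 and consumes the returned rtx.  */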
4253 /* Generate code for computing expression EXP,
4254 and storing the value into TARGET.
4255 TARGET may contain a QUEUED rtx.
4257 If WANT_VALUE is nonzero, return a copy of the value
4258 not in TARGET, so that we can be sure to use the proper
4259 value in a containing expression even if TARGET has something
4260 else stored in it. If possible, we copy the value through a pseudo
4261 and return that pseudo. Or, if the value is constant, we try to
4262 return the constant. In some cases, we return a pseudo
4263 copied *from* TARGET.
4265 If the mode is BLKmode then we may return TARGET itself.
4266 It turns out that in BLKmode it doesn't cause a problem,
4267 because C has no operators that could combine two different
4268 assignments into the same BLKmode object with different values
4269 with no sequence point. Will other languages need this to
4270 be more thorough?
4272 If WANT_VALUE is 0, we return NULL, to make sure
4273 to catch quickly any cases where the caller uses the value
4274 and fails to set WANT_VALUE. */
4277 store_expr (exp, target, want_value)
4283 int dont_return_target = 0;
4284 int dont_store_target = 0;
4286 if (TREE_CODE (exp) == COMPOUND_EXPR)
4288 /* Perform the first part of the compound expression, then assign from the
4289 second part. */
4290 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4292 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4294 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4296 /* For conditional expression, get safe form of the target. Then
4297 test the condition, doing the appropriate assignment on either
4298 side. This avoids the creation of unnecessary temporaries.
4299 For non-BLKmode, it is more efficient not to do this. */
4301 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4304 target = protect_from_queue (target, 1);
4306 do_pending_stack_adjust ();
4308 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4309 start_cleanup_deferral ();
4310 store_expr (TREE_OPERAND (exp, 1), target, 0);
4311 end_cleanup_deferral ();
4313 emit_jump_insn (gen_jump (lab2));
4316 start_cleanup_deferral ();
4317 store_expr (TREE_OPERAND (exp, 2), target, 0);
4318 end_cleanup_deferral ();
4323 return want_value ? target : NULL_RTX;
4325 else if (queued_subexp_p (target))
4326 /* If target contains a postincrement, let's not risk
4327 using it as the place to generate the rhs. */
4329 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4331 /* Expand EXP into a new pseudo. */
4332 temp = gen_reg_rtx (GET_MODE (target));
4333 temp = expand_expr (exp, temp, GET_MODE (target), 0);
4336 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
4338 /* If target is volatile, ANSI requires accessing the value
4339 *from* the target, if it is accessed. So make that happen.
4340 In no case return the target itself. */
4341 if (! MEM_VOLATILE_P (target) && want_value)
4342 dont_return_target = 1;
4344 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
4345 && GET_MODE (target) != BLKmode)
4346 /* If target is in memory and caller wants value in a register instead,
4347 arrange that. Pass TARGET as target for expand_expr so that,
4348 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4349 We know expand_expr will not use the target in that case.
4350 Don't do this if TARGET is volatile because we are supposed
4351 to write it and then read it. */
4353 temp = expand_expr (exp, target, GET_MODE (target), 0);
4354 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4356 /* If TEMP is already in the desired TARGET, only copy it from
4357 memory and don't store it there again. */
4359 || (rtx_equal_p (temp, target)
4360 && ! side_effects_p (temp) && ! side_effects_p (target)))
4361 dont_store_target = 1;
4362 temp = copy_to_reg (temp);
4364 dont_return_target = 1;
4366 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4367 /* If this is a scalar in a register that is stored in a wider mode
4368 than the declared mode, compute the result into its declared mode
4369 and then convert to the wider mode. Our value is the computed expression. */
4372 rtx inner_target = 0;
4374 /* If we don't want a value, we can do the conversion inside EXP,
4375 which will often result in some optimizations. Do the conversion
4376 in two steps: first change the signedness, if needed, then
4377 the extend. But don't do this if the type of EXP is a subtype
4378 of something else since then the conversion might involve
4379 more than just converting modes. */
4380 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4381 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4383 if (TREE_UNSIGNED (TREE_TYPE (exp))
4384 != SUBREG_PROMOTED_UNSIGNED_P (target))
4386 ((*lang_hooks.types.signed_or_unsigned_type)
4387 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4389 exp = convert ((*lang_hooks.types.type_for_mode)
4390 (GET_MODE (SUBREG_REG (target)),
4391 SUBREG_PROMOTED_UNSIGNED_P (target)),
4394 inner_target = SUBREG_REG (target);
4397 temp = expand_expr (exp, inner_target, VOIDmode, 0);
4399 /* If TEMP is a volatile MEM and we want a result value, make
4400 the access now so it gets done only once. Likewise if
4401 it contains TARGET. */
4402 if (GET_CODE (temp) == MEM && want_value
4403 && (MEM_VOLATILE_P (temp)
4404 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
4405 temp = copy_to_reg (temp);
4407 /* If TEMP is a VOIDmode constant, use convert_modes to make
4408 sure that we properly convert it. */
4409 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4411 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4412 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4413 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4414 GET_MODE (target), temp,
4415 SUBREG_PROMOTED_UNSIGNED_P (target));
4418 convert_move (SUBREG_REG (target), temp,
4419 SUBREG_PROMOTED_UNSIGNED_P (target));
4421 /* If we promoted a constant, change the mode back down to match
4422 target. Otherwise, the caller might get confused by a result whose
4423 mode is larger than expected. */
4425 if (want_value && GET_MODE (temp) != GET_MODE (target))
4427 if (GET_MODE (temp) != VOIDmode)
4429 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4430 SUBREG_PROMOTED_VAR_P (temp) = 1;
4431 SUBREG_PROMOTED_UNSIGNED_SET (temp,
4432 SUBREG_PROMOTED_UNSIGNED_P (target));
4435 temp = convert_modes (GET_MODE (target),
4436 GET_MODE (SUBREG_REG (target)),
4437 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4440 return want_value ? temp : NULL_RTX;
4444 temp = expand_expr (exp, target, GET_MODE (target), 0);
4445 /* Return TARGET if it's a specified hardware register.
4446 If TARGET is a volatile mem ref, either return TARGET
4447 or return a reg copied *from* TARGET; ANSI requires this.
4449 Otherwise, if TEMP is not TARGET, return TEMP
4450 if it is constant (for efficiency),
4451 or if we really want the correct value. */
4452 if (!(target && GET_CODE (target) == REG
4453 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4454 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4455 && ! rtx_equal_p (temp, target)
4456 && (CONSTANT_P (temp) || want_value))
4457 dont_return_target = 1;
4460 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4461 the same as that of TARGET, adjust the constant. This is needed, for
4462 example, in case it is a CONST_DOUBLE and we want only a word-sized value. */
4464 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4465 && TREE_CODE (exp) != ERROR_MARK
4466 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4467 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4468 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4470 /* If value was not generated in the target, store it there.
4471 Convert the value to TARGET's type first if necessary.
4472 If TEMP and TARGET compare equal according to rtx_equal_p, but
4473 one or both of them are volatile memory refs, we have to distinguish two cases:
4475 - expand_expr has used TARGET. In this case, we must not generate
4476 another copy. This can be detected by TARGET being equal according to == .
4478 - expand_expr has not used TARGET - that means that the source just
4479 happens to have the same RTX form. Since temp will have been created
4480 by expand_expr, it will compare unequal according to == .
4481 We must generate a copy in this case, to reach the correct number
4482 of volatile memory references. */
4484 if ((! rtx_equal_p (temp, target)
4485 || (temp != target && (side_effects_p (temp)
4486 || side_effects_p (target))))
4487 && TREE_CODE (exp) != ERROR_MARK
4488 && ! dont_store_target
4489 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4490 but TARGET is not a valid memory reference, TEMP will differ
4491 from TARGET although it is really the same location. */
4492 && (TREE_CODE_CLASS (TREE_CODE (exp)) != 'd'
4493 || target != DECL_RTL_IF_SET (exp))
4494 /* If there's nothing to copy, don't bother. Don't call expr_size
4495 unless necessary, because the expr_size hook of some front ends (C++)
4496 aborts on objects that are not supposed to be bit-copied or can't be used in this case. */
4498 && expr_size (exp) != const0_rtx)
4500 target = protect_from_queue (target, 1);
4501 if (GET_MODE (temp) != GET_MODE (target)
4502 && GET_MODE (temp) != VOIDmode)
4504 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4505 if (dont_return_target)
4507 /* In this case, we will return TEMP,
4508 so make sure it has the proper mode.
4509 But don't forget to store the value into TARGET. */
4510 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4511 emit_move_insn (target, temp);
4514 convert_move (target, temp, unsignedp);
4517 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4519 /* Handle copying a string constant into an array. The string
4520 constant may be shorter than the array. So copy just the string's
4521 actual length, and clear the rest. First get the size of the data
4522 type of the string, which is actually the size of the target. */
4523 rtx size = expr_size (exp);
4525 if (GET_CODE (size) == CONST_INT
4526 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4527 emit_block_move (target, temp, size, BLOCK_OP_NORMAL);
4530 /* Compute the size of the data to copy from the string. */
4532 = size_binop (MIN_EXPR,
4533 make_tree (sizetype, size),
4534 size_int (TREE_STRING_LENGTH (exp)));
4535 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
4539 /* Copy that much. */
4540 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx, 0);
4541 emit_block_move (target, temp, copy_size_rtx, BLOCK_OP_NORMAL);
4543 /* Figure out how much is left in TARGET that we have to clear.
4544 Do all calculations in ptr_mode. */
4545 if (GET_CODE (copy_size_rtx) == CONST_INT)
4547 size = plus_constant (size, -INTVAL (copy_size_rtx));
4548 target = adjust_address (target, BLKmode,
4549 INTVAL (copy_size_rtx));
4553 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4554 copy_size_rtx, NULL_RTX, 0,
4557 #ifdef POINTERS_EXTEND_UNSIGNED
4558 if (GET_MODE (copy_size_rtx) != Pmode)
4559 copy_size_rtx = convert_memory_address (Pmode,
4563 target = offset_address (target, copy_size_rtx,
4564 highest_pow2_factor (copy_size));
4565 label = gen_label_rtx ();
4566 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4567 GET_MODE (size), 0, label);
4570 if (size != const0_rtx)
4571 clear_storage (target, size);
4577 /* Handle calls that return values in multiple non-contiguous locations.
4578 The Irix 6 ABI has examples of this. */
4579 else if (GET_CODE (target) == PARALLEL)
4580 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)));
4581 else if (GET_MODE (temp) == BLKmode)
4582 emit_block_move (target, temp, expr_size (exp), BLOCK_OP_NORMAL);
4584 emit_move_insn (target, temp);
4587 /* If we don't want a value, return NULL_RTX. */
4591 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4592 ??? The latter test doesn't seem to make sense. */
4593 else if (dont_return_target && GET_CODE (temp) != MEM)
4596 /* Return TARGET itself if it is a hard register. */
4597 else if (want_value && GET_MODE (target) != BLKmode
4598 && ! (GET_CODE (target) == REG
4599 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4600 return copy_to_reg (target);
4606 /* Return 1 if EXP just contains zeros. */
4614 switch (TREE_CODE (exp))
4618 case NON_LVALUE_EXPR:
4619 case VIEW_CONVERT_EXPR:
4620 return is_zeros_p (TREE_OPERAND (exp, 0));
4623 return integer_zerop (exp);
4627 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4630 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4633 for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4634 elt = TREE_CHAIN (elt))
4635 if (!is_zeros_p (TREE_VALUE (elt)))
4641 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4642 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4643 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4644 if (! is_zeros_p (TREE_VALUE (elt)))
4654 /* Return 1 if EXP contains mostly (3/4) zeros. */
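/* Illustration (added commentary): for the initializer

       int v[4] = { 0, 0, 0, 5 };

   three of the four elements are zero, so 4 * zeros >= 3 * elts holds
   (12 >= 12) and the constructor counts as mostly zeros.  */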
4657 mostly_zeros_p (exp)
4660 if (TREE_CODE (exp) == CONSTRUCTOR)
4662 int elts = 0, zeros = 0;
4663 tree elt = CONSTRUCTOR_ELTS (exp);
4664 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4666 /* If there are no ranges of true bits, it is all zero. */
4667 return elt == NULL_TREE;
4669 for (; elt; elt = TREE_CHAIN (elt))
4671 /* We do not handle the case where the index is a RANGE_EXPR,
4672 so the statistic will be somewhat inaccurate.
4673 We do make a more accurate count in store_constructor itself,
4674 so, since this function is only used for nested array elements,
4675 this should be close enough. */
4676 if (mostly_zeros_p (TREE_VALUE (elt)))
4681 return 4 * zeros >= 3 * elts;
4684 return is_zeros_p (exp);
4687 /* Helper function for store_constructor.
4688 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4689 TYPE is the type of the CONSTRUCTOR, not the element type.
4690 CLEARED is as for store_constructor.
4691 ALIAS_SET is the alias set to use for any stores.
4693 This provides a recursive shortcut back to store_constructor when it isn't
4694 necessary to go through store_field. This is so that we can pass through
4695 the cleared field to let store_constructor know that we may not have to
4696 clear a substructure if the outer structure has already been cleared. */
4699 store_constructor_field (target, bitsize, bitpos, mode, exp, type, cleared,
4702 unsigned HOST_WIDE_INT bitsize;
4703 HOST_WIDE_INT bitpos;
4704 enum machine_mode mode;
4709 if (TREE_CODE (exp) == CONSTRUCTOR
4710 && bitpos % BITS_PER_UNIT == 0
4711 /* If we have a nonzero bitpos for a register target, then we just
4712 let store_field do the bitfield handling. This is unlikely to
4713 generate unnecessary clear instructions anyway. */
4714 && (bitpos == 0 || GET_CODE (target) == MEM))
4716 if (GET_CODE (target) == MEM)
4718 = adjust_address (target,
4719 GET_MODE (target) == BLKmode
4721 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4722 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4725 /* Update the alias set, if required. */
4726 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4727 && MEM_ALIAS_SET (target) != 0)
4729 target = copy_rtx (target);
4730 set_mem_alias_set (target, alias_set);
4733 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4736 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4740 /* Store the value of constructor EXP into the rtx TARGET.
4741 TARGET is either a REG or a MEM; we know it cannot conflict, since
4742 safe_from_p has been called.
4743 CLEARED is true if TARGET is known to have been zero'd.
4744 SIZE is the number of bytes of TARGET we are allowed to modify: this
4745 may not be the same as the size of EXP if we are assigning to a field
4746 which has been packed to exclude padding bits. */
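/* Illustration (added commentary): for

       struct { int a, b, c; } x = { 1 };

   the constructor lists fewer fields than the structure has, so the
   code below clears the whole of X first and then stores 1 into X.a,
   leaving X.b and X.c zero.  */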
4749 store_constructor (exp, target, cleared, size)
4755 tree type = TREE_TYPE (exp);
4756 #ifdef WORD_REGISTER_OPERATIONS
4757 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4760 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4761 || TREE_CODE (type) == QUAL_UNION_TYPE)
4765 /* We either clear the aggregate or indicate the value is dead. */
4766 if ((TREE_CODE (type) == UNION_TYPE
4767 || TREE_CODE (type) == QUAL_UNION_TYPE)
4769 && ! CONSTRUCTOR_ELTS (exp))
4770 /* If the constructor is empty, clear the union. */
4772 clear_storage (target, expr_size (exp));
4776 /* If we are building a static constructor into a register,
4777 set the initial value as zero so we can fold the value into
4778 a constant. But if more than one register is involved,
4779 this probably loses. */
4780 else if (! cleared && GET_CODE (target) == REG && TREE_STATIC (exp)
4781 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4783 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4787 /* If the constructor has fewer fields than the structure
4788 or if we are initializing the structure to mostly zeros,
4789 clear the whole structure first. Don't do this if TARGET is a
4790 register whose mode size isn't equal to SIZE since clear_storage
4791 can't handle this case. */
4792 else if (! cleared && size > 0
4793 && ((list_length (CONSTRUCTOR_ELTS (exp))
4794 != fields_length (type))
4795 || mostly_zeros_p (exp))
4796 && (GET_CODE (target) != REG
4797 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4800 clear_storage (target, GEN_INT (size));
4805 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4807 /* Store each element of the constructor into
4808 the corresponding field of TARGET. */
4810 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4812 tree field = TREE_PURPOSE (elt);
4813 tree value = TREE_VALUE (elt);
4814 enum machine_mode mode;
4815 HOST_WIDE_INT bitsize;
4816 HOST_WIDE_INT bitpos = 0;
4819 rtx to_rtx = target;
4821 /* Just ignore missing fields.
4822 We cleared the whole structure, above,
4823 if any fields are missing. */
4827 if (cleared && is_zeros_p (value))
4830 if (host_integerp (DECL_SIZE (field), 1))
4831 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4835 unsignedp = TREE_UNSIGNED (field);
4836 mode = DECL_MODE (field);
4837 if (DECL_BIT_FIELD (field))
4840 offset = DECL_FIELD_OFFSET (field);
4841 if (host_integerp (offset, 0)
4842 && host_integerp (bit_position (field), 0))
4844 bitpos = int_bit_position (field);
4848 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4854 if (contains_placeholder_p (offset))
4855 offset = build (WITH_RECORD_EXPR, sizetype,
4856 offset, make_tree (TREE_TYPE (exp), target));
4858 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4859 if (GET_CODE (to_rtx) != MEM)
4862 #ifdef POINTERS_EXTEND_UNSIGNED
4863 if (GET_MODE (offset_rtx) != Pmode)
4864 offset_rtx = convert_memory_address (Pmode, offset_rtx);
4866 if (GET_MODE (offset_rtx) != ptr_mode)
4867 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4870 to_rtx = offset_address (to_rtx, offset_rtx,
4871 highest_pow2_factor (offset));
4874 if (TREE_READONLY (field))
4876 if (GET_CODE (to_rtx) == MEM)
4877 to_rtx = copy_rtx (to_rtx);
4879 RTX_UNCHANGING_P (to_rtx) = 1;
4882 #ifdef WORD_REGISTER_OPERATIONS
4883 /* If this initializes a field that is smaller than a word, at the
4884 start of a word, try to widen it to a full word.
4885 This special case allows us to output C++ member function
4886 initializations in a form that the optimizers can understand. */
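/* Illustration (added commentary): if a constructor such as

       struct S { short lo; short hi; } s = { 3, 4 };

   is built in a word-sized register, the INTEGER_CST 3 for the
   sub-word field LO, which starts the word, is widened to a full word
   (and shifted left on big-endian targets) so that the store becomes
   a plain word move.  */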
4887 if (GET_CODE (target) == REG
4888 && bitsize < BITS_PER_WORD
4889 && bitpos % BITS_PER_WORD == 0
4890 && GET_MODE_CLASS (mode) == MODE_INT
4891 && TREE_CODE (value) == INTEGER_CST
4893 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4895 tree type = TREE_TYPE (value);
4897 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4899 type = (*lang_hooks.types.type_for_size)
4900 (BITS_PER_WORD, TREE_UNSIGNED (type));
4901 value = convert (type, value);
4904 if (BYTES_BIG_ENDIAN)
4906 = fold (build (LSHIFT_EXPR, type, value,
4907 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4908 bitsize = BITS_PER_WORD;
4913 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4914 && DECL_NONADDRESSABLE_P (field))
4916 to_rtx = copy_rtx (to_rtx);
4917 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4920 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4921 value, type, cleared,
4922 get_alias_set (TREE_TYPE (field)));
4925 else if (TREE_CODE (type) == ARRAY_TYPE
4926 || TREE_CODE (type) == VECTOR_TYPE)
4931 tree domain = TYPE_DOMAIN (type);
4932 tree elttype = TREE_TYPE (type);
4934 HOST_WIDE_INT minelt = 0;
4935 HOST_WIDE_INT maxelt = 0;
4937 /* Vectors are like arrays, but the domain is stored via an array type indirectly. */
4939 if (TREE_CODE (type) == VECTOR_TYPE)
4941 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
4942 the same field as TYPE_DOMAIN, we are not guaranteed that it always will. */
4944 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
4945 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
4948 const_bounds_p = (TYPE_MIN_VALUE (domain)
4949 && TYPE_MAX_VALUE (domain)
4950 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4951 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4953 /* If we have constant bounds for the range of the type, get them. */
4956 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4957 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4960 /* If the constructor has fewer elements than the array,
4961 clear the whole array first. Similarly if this is
4962 a static constructor of a non-BLKmode object. */
4963 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4967 HOST_WIDE_INT count = 0, zero_count = 0;
4968 need_to_clear = ! const_bounds_p;
4970 /* This loop is a more accurate version of the loop in
4971 mostly_zeros_p (it handles RANGE_EXPR in an index).
4972 It is also needed to check for missing elements. */
4973 for (elt = CONSTRUCTOR_ELTS (exp);
4974 elt != NULL_TREE && ! need_to_clear;
4975 elt = TREE_CHAIN (elt))
4977 tree index = TREE_PURPOSE (elt);
4978 HOST_WIDE_INT this_node_count;
4980 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4982 tree lo_index = TREE_OPERAND (index, 0);
4983 tree hi_index = TREE_OPERAND (index, 1);
4985 if (! host_integerp (lo_index, 1)
4986 || ! host_integerp (hi_index, 1))
4992 this_node_count = (tree_low_cst (hi_index, 1)
4993 - tree_low_cst (lo_index, 1) + 1);
4996 this_node_count = 1;
4998 count += this_node_count;
4999 if (mostly_zeros_p (TREE_VALUE (elt)))
5000 zero_count += this_node_count;
5003 /* Clear the entire array first if there are any missing elements,
5004 or if the incidence of zero elements is >= 75%. */
5006 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
5010 if (need_to_clear && size > 0)
5015 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5017 clear_storage (target, GEN_INT (size));
5021 else if (REG_P (target))
5022 /* Inform later passes that the old value is dead. */
5023 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5025 /* Store each element of the constructor into
5026 the corresponding element of TARGET, determined
5027 by counting the elements. */
5028 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
5030 elt = TREE_CHAIN (elt), i++)
5032 enum machine_mode mode;
5033 HOST_WIDE_INT bitsize;
5034 HOST_WIDE_INT bitpos;
5036 tree value = TREE_VALUE (elt);
5037 tree index = TREE_PURPOSE (elt);
5038 rtx xtarget = target;
5040 if (cleared && is_zeros_p (value))
5043 unsignedp = TREE_UNSIGNED (elttype);
5044 mode = TYPE_MODE (elttype);
5045 if (mode == BLKmode)
5046 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5047 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5050 bitsize = GET_MODE_BITSIZE (mode);
5052 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5054 tree lo_index = TREE_OPERAND (index, 0);
5055 tree hi_index = TREE_OPERAND (index, 1);
5056 rtx index_r, pos_rtx, hi_r, loop_top, loop_end;
5057 struct nesting *loop;
5058 HOST_WIDE_INT lo, hi, count;
5061 /* If the range is constant and "small", unroll the loop. */
5063 && host_integerp (lo_index, 0)
5064 && host_integerp (hi_index, 0)
5065 && (lo = tree_low_cst (lo_index, 0),
5066 hi = tree_low_cst (hi_index, 0),
5067 count = hi - lo + 1,
5068 (GET_CODE (target) != MEM
5070 || (host_integerp (TYPE_SIZE (elttype), 1)
5071 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5074 lo -= minelt; hi -= minelt;
5075 for (; lo <= hi; lo++)
5077 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5079 if (GET_CODE (target) == MEM
5080 && !MEM_KEEP_ALIAS_SET_P (target)
5081 && TREE_CODE (type) == ARRAY_TYPE
5082 && TYPE_NONALIASED_COMPONENT (type))
5084 target = copy_rtx (target);
5085 MEM_KEEP_ALIAS_SET_P (target) = 1;
5088 store_constructor_field
5089 (target, bitsize, bitpos, mode, value, type, cleared,
5090 get_alias_set (elttype));
5095 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
5096 loop_top = gen_label_rtx ();
5097 loop_end = gen_label_rtx ();
5099 unsignedp = TREE_UNSIGNED (domain);
5101 index = build_decl (VAR_DECL, NULL_TREE, domain);
5104 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5106 SET_DECL_RTL (index, index_r);
5107 if (TREE_CODE (value) == SAVE_EXPR
5108 && SAVE_EXPR_RTL (value) == 0)
5110 /* Make sure value gets expanded once before the loop. */
5112 expand_expr (value, const0_rtx, VOIDmode, 0);
5115 store_expr (lo_index, index_r, 0);
5116 loop = expand_start_loop (0);
5118 /* Assign value to element index. */
5120 = convert (ssizetype,
5121 fold (build (MINUS_EXPR, TREE_TYPE (index),
5122 index, TYPE_MIN_VALUE (domain))));
5123 position = size_binop (MULT_EXPR, position,
5125 TYPE_SIZE_UNIT (elttype)));
5127 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
5128 xtarget = offset_address (target, pos_rtx,
5129 highest_pow2_factor (position));
5130 xtarget = adjust_address (xtarget, mode, 0);
5131 if (TREE_CODE (value) == CONSTRUCTOR)
5132 store_constructor (value, xtarget, cleared,
5133 bitsize / BITS_PER_UNIT);
5135 store_expr (value, xtarget, 0);
5137 expand_exit_loop_if_false (loop,
5138 build (LT_EXPR, integer_type_node,
5141 expand_increment (build (PREINCREMENT_EXPR,
5143 index, integer_one_node), 0, 0);
5145 emit_label (loop_end);
5148 else if ((index != 0 && ! host_integerp (index, 0))
5149 || ! host_integerp (TYPE_SIZE (elttype), 1))
5154 index = ssize_int (1);
5157 index = convert (ssizetype,
5158 fold (build (MINUS_EXPR, index,
5159 TYPE_MIN_VALUE (domain))));
5161 position = size_binop (MULT_EXPR, index,
5163 TYPE_SIZE_UNIT (elttype)));
5164 xtarget = offset_address (target,
5165 expand_expr (position, 0, VOIDmode, 0),
5166 highest_pow2_factor (position));
5167 xtarget = adjust_address (xtarget, mode, 0);
5168 store_expr (value, xtarget, 0);
5173 bitpos = ((tree_low_cst (index, 0) - minelt)
5174 * tree_low_cst (TYPE_SIZE (elttype), 1));
5176 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5178 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
5179 && TREE_CODE (type) == ARRAY_TYPE
5180 && TYPE_NONALIASED_COMPONENT (type))
5182 target = copy_rtx (target);
5183 MEM_KEEP_ALIAS_SET_P (target) = 1;
5186 store_constructor_field (target, bitsize, bitpos, mode, value,
5187 type, cleared, get_alias_set (elttype));
5193 /* Set constructor assignments. */
5194 else if (TREE_CODE (type) == SET_TYPE)
5196 tree elt = CONSTRUCTOR_ELTS (exp);
5197 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
5198 tree domain = TYPE_DOMAIN (type);
5199 tree domain_min, domain_max, bitlength;
5201 /* The default implementation strategy is to extract the constant
5202 parts of the constructor, use that to initialize the target,
5203 and then "or" in whatever non-constant ranges we need in addition.
5205 If a large set is all zero or all ones, it is
5206 probably better to set it using memset (if available) or bzero.
5207 Also, if a large set has just a single range, it may be better
5208 to first clear the whole set (using bzero/memset) and then
5209 set the bits we want. */
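/* Illustration (added commentary): a constant Pascal/CHILL-style set
   such as ['a'..'z'] is assembled word by word from the bits returned
   by get_set_constructor_bits; a byte-aligned constant range can be
   set with memset, and anything else falls back to the run-time
   helper __setbits (both below).  */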
5211 /* Check for all zeros. */
5212 if (elt == NULL_TREE && size > 0)
5215 clear_storage (target, GEN_INT (size));
5219 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
5220 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
5221 bitlength = size_binop (PLUS_EXPR,
5222 size_diffop (domain_max, domain_min),
5225 nbits = tree_low_cst (bitlength, 1);
5227 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
5228 are "complicated" (more than one range), initialize (the
5229 constant parts) by copying from a constant. */
5230 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
5231 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
5233 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
5234 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
5235 char *bit_buffer = (char *) alloca (nbits);
5236 HOST_WIDE_INT word = 0;
5237 unsigned int bit_pos = 0;
5238 unsigned int ibit = 0;
5239 unsigned int offset = 0; /* In bytes from beginning of set. */
5241 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
5244 if (bit_buffer[ibit])
5246 if (BYTES_BIG_ENDIAN)
5247 word |= (1 << (set_word_size - 1 - bit_pos));
5249 word |= 1 << bit_pos;
5253 if (bit_pos >= set_word_size || ibit == nbits)
5255 if (word != 0 || ! cleared)
5257 rtx datum = GEN_INT (word);
5260 /* The assumption here is that it is safe to use
5261 XEXP if the set is multi-word, but not if
5262 it's single-word. */
5263 if (GET_CODE (target) == MEM)
5264 to_rtx = adjust_address (target, mode, offset);
5265 else if (offset == 0)
5269 emit_move_insn (to_rtx, datum);
5276 offset += set_word_size / BITS_PER_UNIT;
5281 /* Don't bother clearing storage if the set is all ones. */
5282 if (TREE_CHAIN (elt) != NULL_TREE
5283 || (TREE_PURPOSE (elt) == NULL_TREE
5285 : ( ! host_integerp (TREE_VALUE (elt), 0)
5286 || ! host_integerp (TREE_PURPOSE (elt), 0)
5287 || (tree_low_cst (TREE_VALUE (elt), 0)
5288 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5289 != (HOST_WIDE_INT) nbits))))
5290 clear_storage (target, expr_size (exp));
5292 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5294 /* Start of range of element or NULL. */
5295 tree startbit = TREE_PURPOSE (elt);
5296 /* End of range of element, or element value. */
5297 tree endbit = TREE_VALUE (elt);
5298 HOST_WIDE_INT startb, endb;
5299 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5301 bitlength_rtx = expand_expr (bitlength,
5302 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5304 /* Handle non-range tuple element like [ expr ]. */
5305 if (startbit == NULL_TREE)
5307 startbit = save_expr (endbit);
5311 startbit = convert (sizetype, startbit);
5312 endbit = convert (sizetype, endbit);
5313 if (! integer_zerop (domain_min))
5315 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5316 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5318 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5319 EXPAND_CONST_ADDRESS);
5320 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5321 EXPAND_CONST_ADDRESS);
5327 ((build_qualified_type ((*lang_hooks.types.type_for_mode)
5328 (GET_MODE (target), 0),
5331 emit_move_insn (targetx, target);
5334 else if (GET_CODE (target) == MEM)
5339 /* Optimization: If startbit and endbit are constants divisible
5340 by BITS_PER_UNIT, call memset instead. */
5341 if (TARGET_MEM_FUNCTIONS
5342 && TREE_CODE (startbit) == INTEGER_CST
5343 && TREE_CODE (endbit) == INTEGER_CST
5344 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5345 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5347 emit_library_call (memset_libfunc, LCT_NORMAL,
5349 plus_constant (XEXP (targetx, 0),
5350 startb / BITS_PER_UNIT),
5352 constm1_rtx, TYPE_MODE (integer_type_node),
5353 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5354 TYPE_MODE (sizetype));
5357 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
5358 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
5359 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5360 startbit_rtx, TYPE_MODE (sizetype),
5361 endbit_rtx, TYPE_MODE (sizetype));
5364 emit_move_insn (target, targetx);
5372 /* Store the value of EXP (an expression tree)
5373 into a subfield of TARGET which has mode MODE and occupies
5374 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5375 If MODE is VOIDmode, it means that we are storing into a bit-field.
5377 If VALUE_MODE is VOIDmode, return nothing in particular.
5378 UNSIGNEDP is not used in this case.
5380 Otherwise, return an rtx for the value stored. This rtx
5381 has mode VALUE_MODE if that is convenient to do.
5382 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5384 TYPE is the type of the underlying object,
5386 ALIAS_SET is the alias set for the destination. This value will
5387 (in general) be different from that for TARGET, since TARGET is a
5388 reference to the containing structure. */
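/* Illustration (added commentary): for the C assignment

       struct { unsigned f : 3; } s;
       s.f = x;

   store_field is entered with BITSIZE 3, BITPOS 0 and MODE VOIDmode,
   so the value is stored with store_bit_field rather than through an
   ordinary memory reference.  */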
5391 store_field (target, bitsize, bitpos, mode, exp, value_mode, unsignedp, type,
5394 HOST_WIDE_INT bitsize;
5395 HOST_WIDE_INT bitpos;
5396 enum machine_mode mode;
5398 enum machine_mode value_mode;
5403 HOST_WIDE_INT width_mask = 0;
5405 if (TREE_CODE (exp) == ERROR_MARK)
5408 /* If we have nothing to store, do nothing unless the expression has side-effects. */
5411 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5412 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5413 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5415 /* If we are storing into an unaligned field of an aligned union that is
5416 in a register, we may have the mode of TARGET being an integer mode but
5417 MODE == BLKmode. In that case, get an aligned object whose size and
5418 alignment are the same as TARGET and store TARGET into it (we can avoid
5419 the store if the field being stored is the entire width of TARGET). Then
5420 call ourselves recursively to store the field into a BLKmode version of
5421 that object. Finally, load from the object into TARGET. This is not
5422 very efficient in general, but should only be slightly more expensive
5423 than the otherwise-required unaligned accesses. Perhaps this can be
5424 cleaned up later. */
5427 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5431 (build_qualified_type (type, TYPE_QUALS (type) | TYPE_QUAL_CONST),
5433 rtx blk_object = adjust_address (object, BLKmode, 0);
5435 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5436 emit_move_insn (object, target);
5438 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5441 emit_move_insn (target, object);
5443 /* We want to return the BLKmode version of the data. */
5447 if (GET_CODE (target) == CONCAT)
5449 /* We're storing into a struct containing a single __complex. */
5453 return store_expr (exp, target, 0);
5456 /* If the structure is in a register or if the component
5457 is a bit field, we cannot use addressing to access it.
5458 Use bit-field techniques or SUBREG to store in it. */
5460 if (mode == VOIDmode
5461 || (mode != BLKmode && ! direct_store[(int) mode]
5462 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5463 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5464 || GET_CODE (target) == REG
5465 || GET_CODE (target) == SUBREG
5466 /* If the field isn't aligned enough to store as an ordinary memref,
5467 store it as a bit field. */
5468 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
5469 && (MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode)
5470 || bitpos % GET_MODE_ALIGNMENT (mode)))
5471 /* If the RHS and field are a constant size and the size of the
5472 RHS isn't the same size as the bitfield, we must use bitfield operations. */
5475 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5476 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5478 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5480 /* If BITSIZE is narrower than the size of the type of EXP
5481 we will be narrowing TEMP. Normally, what's wanted are the
5482 low-order bits. However, if EXP's type is a record and this is
5483 a big-endian machine, we want the upper BITSIZE bits. */
5484 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5485 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5486 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5487 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5488 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5492 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE. */
5494 if (mode != VOIDmode && mode != BLKmode
5495 && mode != TYPE_MODE (TREE_TYPE (exp)))
5496 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5498 /* If the modes of TARGET and TEMP are both BLKmode, both
5499 must be in memory and BITPOS must be aligned on a byte
5500 boundary. If so, we simply do a block copy. */
5501 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5503 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5504 || bitpos % BITS_PER_UNIT != 0)
5507 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5508 emit_block_move (target, temp,
5509 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5513 return value_mode == VOIDmode ? const0_rtx : target;
5516 /* Store the value in the bitfield. */
5517 store_bit_field (target, bitsize, bitpos, mode, temp,
5518 int_size_in_bytes (type));
5520 if (value_mode != VOIDmode)
5522 /* The caller wants an rtx for the value.
5523 If possible, avoid refetching from the bitfield itself. */
5525 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5528 enum machine_mode tmode;
5530 tmode = GET_MODE (temp);
5531 if (tmode == VOIDmode)
5535 return expand_and (tmode, temp,
5536 gen_int_mode (width_mask, tmode),
5539 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5540 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5541 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5544 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5545 NULL_RTX, value_mode, VOIDmode,
5546 int_size_in_bytes (type));
5552 rtx addr = XEXP (target, 0);
5553 rtx to_rtx = target;
5555 /* If a value is wanted, it must be the lhs;
5556 so make the address stable for multiple use. */
5558 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5559 && ! CONSTANT_ADDRESS_P (addr)
5560 /* A frame-pointer reference is already stable. */
5561 && ! (GET_CODE (addr) == PLUS
5562 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5563 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5564 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5565 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5567 /* Now build a reference to just the desired component. */
5569 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5571 if (to_rtx == target)
5572 to_rtx = copy_rtx (to_rtx);
5574 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5575 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5576 set_mem_alias_set (to_rtx, alias_set);
5578 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5582 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5583 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5584 codes and find the ultimate containing object, which we return.
5586 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5587 bit position, and *PUNSIGNEDP to the signedness of the field.
5588 If the position of the field is variable, we store a tree
5589 giving the variable offset (in units) in *POFFSET.
5590 This offset is in addition to the bit position.
5591 If the position is not variable, we store 0 in *POFFSET.
5593 If any of the extraction expressions is volatile,
5594 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5596 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5597 is a mode that can be used to access the field. In that case, *PBITSIZE is redundant.
5600 If the field describes a variable-sized object, *PMODE is set to
5601 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5602 this case, but the address of the object can be found. */
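/* Illustration (added commentary): for a reference such as

       s.a[i].b

   the object returned is S; the constant field offsets of A and B
   accumulate into *PBITPOS, while the variable contribution
   I * sizeof (s.a[0]) comes back as a tree in *POFFSET.  */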
5605 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5606 punsignedp, pvolatilep)
5608 HOST_WIDE_INT *pbitsize;
5609 HOST_WIDE_INT *pbitpos;
5611 enum machine_mode *pmode;
5616 enum machine_mode mode = VOIDmode;
5617 tree offset = size_zero_node;
5618 tree bit_offset = bitsize_zero_node;
5619 tree placeholder_ptr = 0;
5622 /* First get the mode, signedness, and size. We do this from just the
5623 outermost expression. */
5624 if (TREE_CODE (exp) == COMPONENT_REF)
5626 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5627 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5628 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5630 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5632 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5634 size_tree = TREE_OPERAND (exp, 1);
5635 *punsignedp = TREE_UNSIGNED (exp);
5639 mode = TYPE_MODE (TREE_TYPE (exp));
5640 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5642 if (mode == BLKmode)
5643 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5645 *pbitsize = GET_MODE_BITSIZE (mode);
5650 if (! host_integerp (size_tree, 1))
5651 mode = BLKmode, *pbitsize = -1;
5653 *pbitsize = tree_low_cst (size_tree, 1);
5656 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5657 and find the ultimate containing object. */
5660 if (TREE_CODE (exp) == BIT_FIELD_REF)
5661 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5662 else if (TREE_CODE (exp) == COMPONENT_REF)
5664 tree field = TREE_OPERAND (exp, 1);
5665 tree this_offset = DECL_FIELD_OFFSET (field);
5667 /* If this field hasn't been filled in yet, don't go
5668 past it. This should only happen when folding expressions
5669 made during type construction. */
5670 if (this_offset == 0)
5672 else if (! TREE_CONSTANT (this_offset)
5673 && contains_placeholder_p (this_offset))
5674 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5676 offset = size_binop (PLUS_EXPR, offset, this_offset);
5677 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5678 DECL_FIELD_BIT_OFFSET (field));
5680 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5683 else if (TREE_CODE (exp) == ARRAY_REF
5684 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5686 tree index = TREE_OPERAND (exp, 1);
5687 tree array = TREE_OPERAND (exp, 0);
5688 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5689 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5690 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5692 /* We assume all arrays have sizes that are a multiple of a byte.
5693 First subtract the lower bound, if any, in the type of the
5694 index, then convert to sizetype and multiply by the size of the array element. */
5696 if (low_bound != 0 && ! integer_zerop (low_bound))
5697 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5700 /* If the index has a self-referential type, pass it to a
5701 WITH_RECORD_EXPR; if the component size does, pass our
5702 component to one. */
5703 if (! TREE_CONSTANT (index)
5704 && contains_placeholder_p (index))
5705 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5706 if (! TREE_CONSTANT (unit_size)
5707 && contains_placeholder_p (unit_size))
5708 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5710 offset = size_binop (PLUS_EXPR, offset,
5711 size_binop (MULT_EXPR,
5712 convert (sizetype, index),
5716 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5718 tree new = find_placeholder (exp, &placeholder_ptr);
5720 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5721 We might have been called from tree optimization where we
5722 haven't set up an object yet. */
5730 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5731 && TREE_CODE (exp) != VIEW_CONVERT_EXPR
5732 && ! ((TREE_CODE (exp) == NOP_EXPR
5733 || TREE_CODE (exp) == CONVERT_EXPR)
5734 && (TYPE_MODE (TREE_TYPE (exp))
5735 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5738 /* If any reference in the chain is volatile, the effect is volatile. */
5739 if (TREE_THIS_VOLATILE (exp))
5742 exp = TREE_OPERAND (exp, 0);
5745 /* If OFFSET is constant, see if we can return the whole thing as a
5746 constant bit position. Otherwise, split it up. */
5747 if (host_integerp (offset, 0)
5748 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5750 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5751 && host_integerp (tem, 0))
5752 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5754 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5760 /* Return 1 if T is an expression that get_inner_reference handles. */
5763 handled_component_p (t)
5766 switch (TREE_CODE (t))
5771 case ARRAY_RANGE_REF:
5772 case NON_LVALUE_EXPR:
5773 case VIEW_CONVERT_EXPR:
5778 return (TYPE_MODE (TREE_TYPE (t))
5779 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5786 /* Given an rtx VALUE that may contain additions and multiplications, return
5787 an equivalent value that just refers to a register, memory, or constant.
5788 This is done by generating instructions to perform the arithmetic and
5789 returning a pseudo-register containing the value.
5791 The returned value may be a REG, SUBREG, MEM or constant. */
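/* Illustration (added commentary): given a value such as

       (plus:SI (mult:SI (reg:SI 100) (const_int 4)) (const_int 8))

   force_operand emits a multiply and an add and returns a pseudo
   register holding the sum, suitable for use as a bare operand.  */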
5794 force_operand (value, target)
5798 /* Use subtarget as the target for operand 0 of a binary operation. */
5799 rtx subtarget = get_subtarget (target);
5800 enum rtx_code code = GET_CODE (value);
5802 /* Check for a PIC address load. */
5803 if ((code == PLUS || code == MINUS)
5804 && XEXP (value, 0) == pic_offset_table_rtx
5805 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5806 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5807 || GET_CODE (XEXP (value, 1)) == CONST))
5810 subtarget = gen_reg_rtx (GET_MODE (value));
5811 emit_move_insn (subtarget, value);
5815 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5818 target = gen_reg_rtx (GET_MODE (value));
5819 convert_move (target, force_operand (XEXP (value, 0), NULL),
5820 code == ZERO_EXTEND);
5824 if (GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
5826 op2 = XEXP (value, 1);
5827 if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
5829 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5832 op2 = negate_rtx (GET_MODE (value), op2);
5835 /* Check for an addition with OP2 a constant integer and our first
5836 operand a PLUS of a virtual register and something else. In that
5837 case, we want to emit the sum of the virtual register and the
5838 constant first and then add the other value. This allows virtual
5839 register instantiation to simply modify the constant rather than
5840 creating another one around this addition. */
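/* Illustration (added commentary): for

       (plus (plus (reg virtual-stack-vars) (reg 101)) (const_int 16))

   we emit virtual-stack-vars + 16 first, which instantiation can later
   fold into a single frame offset, and then add reg 101 to that.  */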
5841 if (code == PLUS && GET_CODE (op2) == CONST_INT
5842 && GET_CODE (XEXP (value, 0)) == PLUS
5843 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5844 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5845 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5847 rtx temp = expand_simple_binop (GET_MODE (value), code,
5848 XEXP (XEXP (value, 0), 0), op2,
5849 subtarget, 0, OPTAB_LIB_WIDEN);
5850 return expand_simple_binop (GET_MODE (value), code, temp,
5851 force_operand (XEXP (XEXP (value,
5853 target, 0, OPTAB_LIB_WIDEN);
5856 op1 = force_operand (XEXP (value, 0), subtarget);
5857 op2 = force_operand (op2, NULL_RTX);
5861 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5863 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5864 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5865 target, 1, OPTAB_LIB_WIDEN);
5867 return expand_divmod (0,
5868 FLOAT_MODE_P (GET_MODE (value))
5869 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5870 GET_MODE (value), op1, op2, target, 0);
5873 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5877 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5881 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5885 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5886 target, 0, OPTAB_LIB_WIDEN);
5889 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5890 target, 1, OPTAB_LIB_WIDEN);
5893 if (GET_RTX_CLASS (code) == '1')
5895 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5896 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5899 #ifdef INSN_SCHEDULING
5900 /* On machines that have insn scheduling, we want all memory references to be
5901 explicit, so we need to deal with such paradoxical SUBREGs. */
5902 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
5903 && (GET_MODE_SIZE (GET_MODE (value))
5904 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5906 = simplify_gen_subreg (GET_MODE (value),
5907 force_reg (GET_MODE (SUBREG_REG (value)),
5908 force_operand (SUBREG_REG (value),
5910 GET_MODE (SUBREG_REG (value)),
5911 SUBREG_BYTE (value));
5917 /* Subroutine of expand_expr: return nonzero iff there is no way that
5918 EXP can reference X, which is being modified. TOP_P is nonzero if this
5919 call is going to be used to determine whether we need a temporary
5920 for EXP, as opposed to a recursive call to this function.
5922 It is always safe for this routine to return zero since it merely
5923 searches for optimization opportunities. */
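/* Illustration (added commentary): when expanding an assignment such as

       a = b + a;

   safe_from_p (DECL_RTL (a), <the PLUS_EXPR>, 1) returns zero because
   the right-hand side references A, which tells the caller to evaluate
   it into a temporary instead of directly into A's rtx.  */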
5926 safe_from_p (x, exp, top_p)
5933 static tree save_expr_list;
5936 /* If EXP has varying size, we MUST use a target since we currently
5937 have no way of allocating temporaries of variable size
5938 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5939 So we assume here that something at a higher level has prevented a
5940 clash. This is somewhat bogus, but the best we can do. Only
5941 do this when X is BLKmode and when we are at the top level. */
5942 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5943 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5944 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5945 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5946 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5948 && GET_MODE (x) == BLKmode)
5949 /* If X is in the outgoing argument area, it is always safe. */
5950 || (GET_CODE (x) == MEM
5951 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5952 || (GET_CODE (XEXP (x, 0)) == PLUS
5953 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5956 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5957 find the underlying pseudo. */
5958 if (GET_CODE (x) == SUBREG)
5961 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5965 /* A SAVE_EXPR might appear many times in the expression passed to the
5966 top-level safe_from_p call, and if it has a complex subexpression,
5967 examining it multiple times could result in a combinatorial explosion.
5968 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5969 with optimization took about 28 minutes to compile -- even though it was
5970 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5971 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5972 we have processed. Note that the only test of top_p was above. */
5981 rtn = safe_from_p (x, exp, 0);
5983 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5984 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5989 /* Now look at our tree code and possibly recurse. */
5990 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5993 exp_rtl = DECL_RTL_IF_SET (exp);
6000 if (TREE_CODE (exp) == TREE_LIST)
6001 return ((TREE_VALUE (exp) == 0
6002 || safe_from_p (x, TREE_VALUE (exp), 0))
6003 && (TREE_CHAIN (exp) == 0
6004 || safe_from_p (x, TREE_CHAIN (exp), 0)));
6005 else if (TREE_CODE (exp) == ERROR_MARK)
6006 return 1; /* An already-visited SAVE_EXPR? */
6011 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6015 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
6016 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
6020 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6021 the expression. If it is set, we conflict iff we are that rtx or
6022 both are in memory. Otherwise, we check all operands of the
6023 expression recursively. */
6025 switch (TREE_CODE (exp))
6028 /* If the operand is static or we are static, we can't conflict.
6029 Likewise if we don't conflict with the operand at all. */
6030 if (staticp (TREE_OPERAND (exp, 0))
6031 || TREE_STATIC (exp)
6032 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6035 /* Otherwise, the only way this can conflict is if we are taking
6036 the address of a DECL whose address is part of X, which is very rare. */
6038 exp = TREE_OPERAND (exp, 0);
6041 if (!DECL_RTL_SET_P (exp)
6042 || GET_CODE (DECL_RTL (exp)) != MEM)
6045 exp_rtl = XEXP (DECL_RTL (exp), 0);
6050 if (GET_CODE (x) == MEM
6051 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6052 get_alias_set (exp)))
6057 /* Assume that the call will clobber all hard registers and all of memory. */
6059 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6060 || GET_CODE (x) == MEM)
6065 /* If a sequence exists, we would have to scan every instruction
6066 in the sequence to see if it was safe. This is probably not worthwhile. */
6068 if (RTL_EXPR_SEQUENCE (exp))
6071 exp_rtl = RTL_EXPR_RTL (exp);
6074 case WITH_CLEANUP_EXPR:
6075 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
6078 case CLEANUP_POINT_EXPR:
6079 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6082 exp_rtl = SAVE_EXPR_RTL (exp);
6086 /* If we've already scanned this, don't do it again. Otherwise,
6087 show we've scanned it and record for clearing the flag if we're going independently. */
6089 if (TREE_PRIVATE (exp))
6092 TREE_PRIVATE (exp) = 1;
6093 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6095 TREE_PRIVATE (exp) = 0;
6099 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
6103 /* The only operand we look at is operand 1. The rest aren't
6104 part of the expression. */
6105 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
6107 case METHOD_CALL_EXPR:
6108 /* This takes an rtx argument, but shouldn't appear here. */
6115 /* If we have an rtx, we do not need to scan our operands. */
6119 nops = first_rtl_op (TREE_CODE (exp));
6120 for (i = 0; i < nops; i++)
6121 if (TREE_OPERAND (exp, i) != 0
6122 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6125 /* If this is a language-specific tree code, it may require
6126 special handling. */
6127 if ((unsigned int) TREE_CODE (exp)
6128 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
6129 && !(*lang_hooks.safe_from_p) (x, exp))
6133 /* If we have an rtl, find any enclosed object. Then see if we conflict with it. */
6137 if (GET_CODE (exp_rtl) == SUBREG)
6139 exp_rtl = SUBREG_REG (exp_rtl);
6140 if (GET_CODE (exp_rtl) == REG
6141 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6145 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6146 are memory and they conflict. */
6147 return ! (rtx_equal_p (x, exp_rtl)
6148 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
6149 && true_dependence (exp_rtl, VOIDmode, x,
6150 rtx_addr_varies_p)));
6153 /* If we reach here, it is safe. */
6157 /* Subroutine of expand_expr: return rtx if EXP is a
6158 variable or parameter; else return 0. */
6165 switch (TREE_CODE (exp))
6169 return DECL_RTL (exp);
6175 #ifdef MAX_INTEGER_COMPUTATION_MODE
6178 check_max_integer_computation_mode (exp)
6181 enum tree_code code;
6182 enum machine_mode mode;
6184 /* Strip any NOPs that don't change the mode. */
6186 code = TREE_CODE (exp);
6188 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
6189 if (code == NOP_EXPR
6190 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
6193 /* First check the type of the overall operation. We need only look at
6194 unary, binary and relational operations. */
6195 if (TREE_CODE_CLASS (code) == '1'
6196 || TREE_CODE_CLASS (code) == '2'
6197 || TREE_CODE_CLASS (code) == '<')
6199 mode = TYPE_MODE (TREE_TYPE (exp));
6200 if (GET_MODE_CLASS (mode) == MODE_INT
6201 && mode > MAX_INTEGER_COMPUTATION_MODE)
6202 internal_error ("unsupported wide integer operation");
6205 /* Check operand of a unary op. */
6206 if (TREE_CODE_CLASS (code) == '1')
6208 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6209 if (GET_MODE_CLASS (mode) == MODE_INT
6210 && mode > MAX_INTEGER_COMPUTATION_MODE)
6211 internal_error ("unsupported wide integer operation");
6214 /* Check operands of a binary/comparison op. */
6215 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
6217 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6218 if (GET_MODE_CLASS (mode) == MODE_INT
6219 && mode > MAX_INTEGER_COMPUTATION_MODE)
6220 internal_error ("unsupported wide integer operation");
6222 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
6223 if (GET_MODE_CLASS (mode) == MODE_INT
6224 && mode > MAX_INTEGER_COMPUTATION_MODE)
6225 internal_error ("unsupported wide integer operation");
6230 /* Return the highest power of two that EXP is known to be a multiple of.
6231 This is used in updating alignment of MEMs in array references. */
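/* Illustration (added commentary): for the expression

       i * 12 + 4

   the MULT contributes a factor of four (the lowest set bit of 12,
   times the factor of one from I), the constant 4 contributes four,
   and the PLUS takes the minimum of the two, so the result is 4.  */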
6233 static HOST_WIDE_INT
6234 highest_pow2_factor (exp)
6237 HOST_WIDE_INT c0, c1;
6239 switch (TREE_CODE (exp))
6242 /* We can find the lowest bit that's a one. If the low
6243 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6244 We need to handle this case since we can find it in a COND_EXPR,
6245 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6246 erroneous program, so return BIGGEST_ALIGNMENT to avoid any later ICE. */
6248 if (TREE_CONSTANT_OVERFLOW (exp))
6249 return BIGGEST_ALIGNMENT;
6252 /* Note: tree_low_cst is intentionally not used here,
6253 we don't care about the upper bits. */
6254 c0 = TREE_INT_CST_LOW (exp);
6256 return c0 ? c0 : BIGGEST_ALIGNMENT;
6260 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6261 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6262 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6263 return MIN (c0, c1);
6266 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6267 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6270 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6272 if (integer_pow2p (TREE_OPERAND (exp, 1))
6273 && host_integerp (TREE_OPERAND (exp, 1), 1))
6275 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6276 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6277 return MAX (1, c0 / c1);
6281 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6282 case SAVE_EXPR: case WITH_RECORD_EXPR:
6283 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6286 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6289 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6290 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6291 return MIN (c0, c1);
6300 /* Similar, except that it is known that the expression must be a multiple
6301 of the alignment of TYPE. */
6303 static HOST_WIDE_INT
6304 highest_pow2_factor_for_type (type, exp)
6308 HOST_WIDE_INT type_align, factor;
6310 factor = highest_pow2_factor (exp);
6311 type_align = TYPE_ALIGN (type) / BITS_PER_UNIT;
6312 return MAX (factor, type_align);
6315 /* Return an object on the placeholder list that matches EXP, a
6316 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
6317 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
6318 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
6319 is a location which initially points to a starting location in the
6320 placeholder list (zero means start of the list) and where a pointer into
6321 the placeholder list at which the object is found is placed. */
6324 find_placeholder (exp, plist)
6328 tree type = TREE_TYPE (exp);
6329 tree placeholder_expr;
6331 for (placeholder_expr
6332 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
6333 placeholder_expr != 0;
6334 placeholder_expr = TREE_CHAIN (placeholder_expr))
6336 tree need_type = TYPE_MAIN_VARIANT (type);
6339 /* Find the outermost reference that is of the type we want. If none,
6340 see if any object has a type that is a pointer to the type we want. */
6342 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6343 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6344 || TREE_CODE (elt) == COND_EXPR)
6345 ? TREE_OPERAND (elt, 1)
6346 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6347 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6348 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6349 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6350 ? TREE_OPERAND (elt, 0) : 0))
6351 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6354 *plist = placeholder_expr;
6358 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6360 = ((TREE_CODE (elt) == COMPOUND_EXPR
6361 || TREE_CODE (elt) == COND_EXPR)
6362 ? TREE_OPERAND (elt, 1)
6363 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6364 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6365 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6366 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6367 ? TREE_OPERAND (elt, 0) : 0))
6368 if (POINTER_TYPE_P (TREE_TYPE (elt))
6369 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6370 == need_type))
6371 {
6372 if (plist)
6373 *plist = placeholder_expr;
6374 return build1 (INDIRECT_REF, need_type, elt);
6375 }
6376 }
6378 return 0;
6379 }
6381 /* expand_expr: generate code for computing expression EXP.
6382 An rtx for the computed value is returned. The value is never null.
6383 In the case of a void EXP, const0_rtx is returned.
6385 The value may be stored in TARGET if TARGET is nonzero.
6386 TARGET is just a suggestion; callers must assume that
6387 the rtx returned may not be the same as TARGET.
6389 If TARGET is CONST0_RTX, it means that the value will be ignored.
6391 If TMODE is not VOIDmode, it suggests generating the
6392 result in mode TMODE. But this is done only when convenient.
6393 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6394 TMODE is just a suggestion; callers must assume that
6395 the rtx returned may not have mode TMODE.
6397 Note that TARGET may have neither TMODE nor MODE. In that case, it
6398 probably will not be used.
6400 If MODIFIER is EXPAND_SUM then when EXP is an addition
6401 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6402 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6403 products as above, or REG or MEM, or constant.
6404 Ordinarily in such cases we would output mul or add instructions
6405 and then return a pseudo reg containing the sum.
6407 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6408 it also marks a label as absolutely required (it can't be dead).
6409 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6410 This is used for outputting expressions used in initializers.
6412 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6413 with a constant address even if that address is not normally legitimate.
6414 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
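/* Added note (not in the original source): under EXPAND_SUM an address
   computation such as p + i*4 may be returned symbolically, e.g. as
   (plus (mult (reg) (const_int 4)) (reg)), rather than forced into a
   single pseudo register, so callers must accept either shape.  */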
6416 rtx
6417 expand_expr (exp, target, tmode, modifier)
6418 tree exp;
6419 rtx target;
6420 enum machine_mode tmode;
6421 enum expand_modifier modifier;
6422 {
6423 rtx op0, op1, temp;
6424 tree type = TREE_TYPE (exp);
6425 int unsignedp = TREE_UNSIGNED (type);
6426 enum machine_mode mode;
6427 enum tree_code code = TREE_CODE (exp);
6428 optab this_optab;
6429 rtx subtarget, original_target;
6430 int ignore;
6431 tree context;
6433 /* Handle ERROR_MARK before anybody tries to access its type.  */
6434 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6435 {
6436 op0 = CONST0_RTX (tmode);
6437 if (op0 != 0)
6438 return op0;
6439 return const0_rtx;
6440 }
6442 mode = TYPE_MODE (type);
6443 /* Use subtarget as the target for operand 0 of a binary operation. */
6444 subtarget = get_subtarget (target);
6445 original_target = target;
6446 ignore = (target == const0_rtx
6447 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6448 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6449 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6450 && TREE_CODE (type) == VOID_TYPE));
6452 /* If we are going to ignore this result, we need only do something
6453 if there is a side-effect somewhere in the expression. If there
6454 is, short-circuit the most common cases here. Note that we must
6455 not call expand_expr with anything but const0_rtx in case this
6456 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6458 if (ignore)
6459 {
6460 if (! TREE_SIDE_EFFECTS (exp))
6461 return const0_rtx;
6463 /* Ensure we reference a volatile object even if value is ignored, but
6464 don't do this if all we are doing is taking its address.  */
6465 if (TREE_THIS_VOLATILE (exp)
6466 && TREE_CODE (exp) != FUNCTION_DECL
6467 && mode != VOIDmode && mode != BLKmode
6468 && modifier != EXPAND_CONST_ADDRESS)
6469 {
6470 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6471 if (GET_CODE (temp) == MEM)
6472 temp = copy_to_reg (temp);
6473 return const0_rtx;
6474 }
6476 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6477 || code == INDIRECT_REF || code == BUFFER_REF)
6478 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6479 modifier);
6481 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6482 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6483 {
6484 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6485 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6486 return const0_rtx;
6487 }
6488 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6489 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6490 /* If the second operand has no side effects, just evaluate
6491 the first.  */
6492 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6493 modifier);
6494 else if (code == BIT_FIELD_REF)
6495 {
6496 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6497 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6498 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6499 return const0_rtx;
6500 }
6502 target = 0;
6503 }
6505 #ifdef MAX_INTEGER_COMPUTATION_MODE
6506 /* Only check stuff here if the mode we want is different from the mode
6507 of the expression; if it's the same, check_max_integer_computation_mode
6508 will handle it.  Do we really need to check this stuff at all?  */
6510 if (target
6511 && GET_MODE (target) != mode
6512 && TREE_CODE (exp) != INTEGER_CST
6513 && TREE_CODE (exp) != PARM_DECL
6514 && TREE_CODE (exp) != ARRAY_REF
6515 && TREE_CODE (exp) != ARRAY_RANGE_REF
6516 && TREE_CODE (exp) != COMPONENT_REF
6517 && TREE_CODE (exp) != BIT_FIELD_REF
6518 && TREE_CODE (exp) != INDIRECT_REF
6519 && TREE_CODE (exp) != CALL_EXPR
6520 && TREE_CODE (exp) != VAR_DECL
6521 && TREE_CODE (exp) != RTL_EXPR)
6522 {
6523 enum machine_mode mode = GET_MODE (target);
6525 if (GET_MODE_CLASS (mode) == MODE_INT
6526 && mode > MAX_INTEGER_COMPUTATION_MODE)
6527 internal_error ("unsupported wide integer operation");
6528 }
6530 if (tmode != mode
6531 && TREE_CODE (exp) != INTEGER_CST
6532 && TREE_CODE (exp) != PARM_DECL
6533 && TREE_CODE (exp) != ARRAY_REF
6534 && TREE_CODE (exp) != ARRAY_RANGE_REF
6535 && TREE_CODE (exp) != COMPONENT_REF
6536 && TREE_CODE (exp) != BIT_FIELD_REF
6537 && TREE_CODE (exp) != INDIRECT_REF
6538 && TREE_CODE (exp) != VAR_DECL
6539 && TREE_CODE (exp) != CALL_EXPR
6540 && TREE_CODE (exp) != RTL_EXPR
6541 && GET_MODE_CLASS (tmode) == MODE_INT
6542 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6543 internal_error ("unsupported wide integer operation");
6545 check_max_integer_computation_mode (exp);
6546 #endif
6548 /* If will do cse, generate all results into pseudo registers
6549 since 1) that allows cse to find more things
6550 and 2) otherwise cse could produce an insn the machine
6551 cannot support.  An exception is a CONSTRUCTOR into a multi-word
6552 MEM: that's much more likely to be most efficient into the MEM. */
6554 if (! cse_not_expected && mode != BLKmode && target
6555 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6556 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD))
6557 target = 0;
6559 switch (code)
6560 {
6561 case LABEL_DECL:
6562 {
6563 tree function = decl_function_context (exp);
6564 /* Handle using a label in a containing function. */
6565 if (function != current_function_decl
6566 && function != inline_function_decl && function != 0)
6567 {
6568 struct function *p = find_function_data (function);
6569 p->expr->x_forced_labels
6570 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6571 p->expr->x_forced_labels);
6572 }
6573 else
6574 {
6575 if (modifier == EXPAND_INITIALIZER)
6576 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6577 label_rtx (exp),
6578 forced_labels);
6579 }
6581 temp = gen_rtx_MEM (FUNCTION_MODE,
6582 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6583 if (function != current_function_decl
6584 && function != inline_function_decl && function != 0)
6585 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6586 return temp;
6587 }
6589 case PARM_DECL:
6590 if (!DECL_RTL_SET_P (exp))
6591 {
6592 error_with_decl (exp, "prior parameter's size depends on `%s'");
6593 return CONST0_RTX (mode);
6594 }
6596 /* ... fall through ...  */
6598 case VAR_DECL:
6599 /* If a static var's type was incomplete when the decl was written,
6600 but the type is complete now, lay out the decl now. */
6601 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6602 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6603 {
6604 rtx value = DECL_RTL_IF_SET (exp);
6606 layout_decl (exp, 0);
6608 /* If the RTL was already set, update its mode and memory
6609 attributes.  */
6610 if (value != 0)
6611 {
6612 PUT_MODE (value, DECL_MODE (exp));
6613 SET_DECL_RTL (exp, 0);
6614 set_mem_attributes (value, exp, 1);
6615 SET_DECL_RTL (exp, value);
6616 }
6617 }
6619 /* ... fall through ...  */
6621 case FUNCTION_DECL:
6622 case RESULT_DECL:
6623 if (DECL_RTL (exp) == 0)
6624 abort ();
6626 /* Ensure variable marked as used even if it doesn't go through
6627 a parser.  If it hasn't been used yet, write out an external
6628 definition.  */
6629 if (! TREE_USED (exp))
6630 {
6631 assemble_external (exp);
6632 TREE_USED (exp) = 1;
6633 }
6635 /* Show we haven't gotten RTL for this yet.  */
6636 temp = 0;
6638 /* Handle variables inherited from containing functions. */
6639 context = decl_function_context (exp);
6641 /* We treat inline_function_decl as an alias for the current function
6642 because that is the inline function whose vars, types, etc.
6643 are being merged into the current function.
6644 See expand_inline_function. */
6646 if (context != 0 && context != current_function_decl
6647 && context != inline_function_decl
6648 /* If var is static, we don't need a static chain to access it. */
6649 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6650 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6651 {
6652 rtx addr;
6654 /* Mark as non-local and addressable.  */
6655 DECL_NONLOCAL (exp) = 1;
6656 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6657 abort ();
6658 (*lang_hooks.mark_addressable) (exp);
6659 if (GET_CODE (DECL_RTL (exp)) != MEM)
6660 abort ();
6661 addr = XEXP (DECL_RTL (exp), 0);
6662 if (GET_CODE (addr) == MEM)
6663 addr
6664 = replace_equiv_address (addr,
6665 fix_lexical_addr (XEXP (addr, 0), exp));
6666 else
6667 addr = fix_lexical_addr (addr, exp);
6669 temp = replace_equiv_address (DECL_RTL (exp), addr);
6670 }
6672 /* This is the case of an array whose size is to be determined
6673 from its initializer, while the initializer is still being parsed.
6674 See expand_decl.  */
6676 else if (GET_CODE (DECL_RTL (exp)) == MEM
6677 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6678 temp = validize_mem (DECL_RTL (exp));
6680 /* If DECL_RTL is memory, we are in the normal case and either
6681 the address is not valid or it is not a register and -fforce-addr
6682 is specified, get the address into a register. */
6684 else if (GET_CODE (DECL_RTL (exp)) == MEM
6685 && modifier != EXPAND_CONST_ADDRESS
6686 && modifier != EXPAND_SUM
6687 && modifier != EXPAND_INITIALIZER
6688 && (! memory_address_p (DECL_MODE (exp),
6689 XEXP (DECL_RTL (exp), 0))
6690 || (flag_force_addr
6691 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6692 temp = replace_equiv_address (DECL_RTL (exp),
6693 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6695 /* If we got something, return it. But first, set the alignment
6696 if the address is a register. */
6697 if (temp != 0)
6698 {
6699 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6700 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6702 return temp;
6703 }
6705 /* If the mode of DECL_RTL does not match that of the decl, it
6706 must be a promoted value. We return a SUBREG of the wanted mode,
6707 but mark it so that we know that it was already extended. */
6709 if (GET_CODE (DECL_RTL (exp)) == REG
6710 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6711 {
6712 /* Get the signedness used for this variable.  Ensure we get the
6713 same mode we got when the variable was declared.  */
6714 if (GET_MODE (DECL_RTL (exp))
6715 != promote_mode (type, DECL_MODE (exp), &unsignedp,
6716 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6717 abort ();
6719 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6720 SUBREG_PROMOTED_VAR_P (temp) = 1;
6721 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6722 return temp;
6723 }
6725 return DECL_RTL (exp);
6727 case INTEGER_CST:
6728 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6729 TREE_INT_CST_HIGH (exp), mode);
6731 /* ??? If overflow is set, fold will have done an incomplete job,
6732 which can result in (plus xx (const_int 0)), which can get
6733 simplified by validate_replace_rtx during virtual register
6734 instantiation, which can result in unrecognizable insns.
6735 Avoid this by forcing all overflows into registers. */
6736 if (TREE_CONSTANT_OVERFLOW (exp)
6737 && modifier != EXPAND_INITIALIZER)
6738 temp = force_reg (mode, temp);
6740 return temp;
6742 case CONST_DECL:
6743 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
6745 case REAL_CST:
6746 /* If optimized, generate immediate CONST_DOUBLE
6747 which will be turned into memory by reload if necessary.
6749 We used to force a register so that loop.c could see it. But
6750 this does not allow gen_* patterns to perform optimizations with
6751 the constants. It also produces two insns in cases like "x = 1.0;".
6752 On most machines, floating-point constants are not permitted in
6753 many insns, so we'd end up copying it to a register in any case.
6755 Now, we do the copying in expand_binop, if appropriate. */
6756 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6757 TYPE_MODE (TREE_TYPE (exp)));
6759 case COMPLEX_CST:
6760 case STRING_CST:
6761 if (! TREE_CST_RTL (exp))
6762 output_constant_def (exp, 1);
6764 /* TREE_CST_RTL probably contains a constant address.
6765 On RISC machines where a constant address isn't valid,
6766 make some insns to get that address into a register. */
6767 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6768 && modifier != EXPAND_CONST_ADDRESS
6769 && modifier != EXPAND_INITIALIZER
6770 && modifier != EXPAND_SUM
6771 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6772 || (flag_force_addr
6773 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6774 return replace_equiv_address (TREE_CST_RTL (exp),
6775 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6776 return TREE_CST_RTL (exp);
6778 case EXPR_WITH_FILE_LOCATION:
6779 {
6780 rtx to_return;
6781 const char *saved_input_filename = input_filename;
6782 int saved_lineno = lineno;
6783 input_filename = EXPR_WFL_FILENAME (exp);
6784 lineno = EXPR_WFL_LINENO (exp);
6785 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6786 emit_line_note (input_filename, lineno);
6787 /* Possibly avoid switching back and forth here. */
6788 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6789 input_filename = saved_input_filename;
6790 lineno = saved_lineno;
6791 return to_return;
6792 }
6794 case SAVE_EXPR:
6795 context = decl_function_context (exp);
6797 /* If this SAVE_EXPR was at global context, assume we are an
6798 initialization function and move it into our context.  */
6799 if (context == 0)
6800 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6802 /* We treat inline_function_decl as an alias for the current function
6803 because that is the inline function whose vars, types, etc.
6804 are being merged into the current function.
6805 See expand_inline_function. */
6806 if (context == current_function_decl || context == inline_function_decl)
6807 context = 0;
6809 /* If this is non-local, handle it.  */
6810 if (context)
6811 {
6812 /* The following call just exists to abort if the context is
6813 not of a containing function.  */
6814 find_function_data (context);
6816 temp = SAVE_EXPR_RTL (exp);
6817 if (temp && GET_CODE (temp) == REG)
6818 {
6819 put_var_into_stack (exp);
6820 temp = SAVE_EXPR_RTL (exp);
6821 }
6822 if (temp == 0 || GET_CODE (temp) != MEM)
6823 abort ();
6824 return
6825 replace_equiv_address (temp,
6826 fix_lexical_addr (XEXP (temp, 0), exp));
6827 }
6828 if (SAVE_EXPR_RTL (exp) == 0)
6829 {
6830 if (mode == VOIDmode)
6831 temp = const0_rtx;
6832 else
6833 temp = assign_temp (build_qualified_type (type,
6834 (TYPE_QUALS (type)
6835 | TYPE_QUAL_CONST)),
6836 3, 0, 0);
6838 SAVE_EXPR_RTL (exp) = temp;
6839 if (!optimize && GET_CODE (temp) == REG)
6840 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6841 save_expr_regs);
6843 /* If the mode of TEMP does not match that of the expression, it
6844 must be a promoted value.  We pass store_expr a SUBREG of the
6845 wanted mode but mark it so that we know that it was already
6846 extended.  Note that `unsignedp' was modified above in
6847 this case.  */
6849 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6850 {
6851 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6852 SUBREG_PROMOTED_VAR_P (temp) = 1;
6853 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6854 }
6856 if (temp == const0_rtx)
6857 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6858 else
6859 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6861 TREE_USED (exp) = 1;
6862 }
6864 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6865 must be a promoted value. We return a SUBREG of the wanted mode,
6866 but mark it so that we know that it was already extended. */
6868 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6869 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6870 {
6871 /* Compute the signedness and make the proper SUBREG.  */
6872 promote_mode (type, mode, &unsignedp, 0);
6873 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6874 SUBREG_PROMOTED_VAR_P (temp) = 1;
6875 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6876 return temp;
6877 }
6879 return SAVE_EXPR_RTL (exp);
6881 case UNSAVE_EXPR:
6882 {
6883 rtx temp;
6884 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6885 TREE_OPERAND (exp, 0)
6886 = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
6887 return temp;
6888 }
6890 case PLACEHOLDER_EXPR:
6891 {
6892 tree old_list = placeholder_list;
6893 tree placeholder_expr = 0;
6895 exp = find_placeholder (exp, &placeholder_expr);
6896 if (exp == 0)
6897 abort ();
6899 placeholder_list = TREE_CHAIN (placeholder_expr);
6900 temp = expand_expr (exp, original_target, tmode, modifier);
6901 placeholder_list = old_list;
6902 return temp;
6903 }
6905 case WITH_RECORD_EXPR:
6906 /* Put the object on the placeholder list, expand our first operand,
6907 and pop the list.  */
6908 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6909 placeholder_list);
6910 target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
6911 modifier);
6912 placeholder_list = TREE_CHAIN (placeholder_list);
6913 return target;
6915 case GOTO_EXPR:
6916 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6917 expand_goto (TREE_OPERAND (exp, 0));
6918 else
6919 expand_computed_goto (TREE_OPERAND (exp, 0));
6920 return const0_rtx;
6922 case EXIT_EXPR:
6923 expand_exit_loop_if_false (NULL,
6924 invert_truthvalue (TREE_OPERAND (exp, 0)));
6925 return const0_rtx;
6927 case LABELED_BLOCK_EXPR:
6928 if (LABELED_BLOCK_BODY (exp))
6929 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
6930 /* Should perhaps use expand_label, but this is simpler and safer.  */
6931 do_pending_stack_adjust ();
6932 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6933 return const0_rtx;
6935 case EXIT_BLOCK_EXPR:
6936 if (EXIT_BLOCK_RETURN (exp))
6937 sorry ("returned value in block_exit_expr");
6938 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6939 return const0_rtx;
6941 case LOOP_EXPR:
6942 push_temp_slots ();
6943 expand_start_loop (1);
6944 expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
6945 expand_end_loop ();
6946 pop_temp_slots ();
6948 return const0_rtx;
6950 case BIND_EXPR:
6951 {
6952 tree vars = TREE_OPERAND (exp, 0);
6953 int vars_need_expansion = 0;
6955 /* Need to open a binding contour here because
6956 if there are any cleanups they must be contained here. */
6957 expand_start_bindings (2);
6959 /* Mark the corresponding BLOCK for output in its proper place. */
6960 if (TREE_OPERAND (exp, 2) != 0
6961 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6962 (*lang_hooks.decls.insert_block) (TREE_OPERAND (exp, 2));
6964 /* If VARS have not yet been expanded, expand them now.  */
6965 while (vars)
6966 {
6967 if (!DECL_RTL_SET_P (vars))
6968 {
6969 vars_need_expansion = 1;
6970 expand_decl (vars);
6971 }
6972 expand_decl_init (vars);
6973 vars = TREE_CHAIN (vars);
6974 }
6976 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
6978 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6980 return temp;
6981 }
6983 case RTL_EXPR:
6984 if (RTL_EXPR_SEQUENCE (exp))
6985 {
6986 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6987 abort ();
6988 emit_insn (RTL_EXPR_SEQUENCE (exp));
6989 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6990 }
6991 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6992 free_temps_for_rtl_expr (exp);
6993 return RTL_EXPR_RTL (exp);
6995 case CONSTRUCTOR:
6996 /* If we don't need the result, just ensure we evaluate any
6997 subexpressions.  */
6998 if (ignore)
6999 {
7000 tree elt;
7002 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
7003 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
7005 return const0_rtx;
7006 }
7008 /* All elts simple constants => refer to a constant in memory. But
7009 if this is a non-BLKmode mode, let it store a field at a time
7010 since that should make a CONST_INT or CONST_DOUBLE when we
7011 fold. Likewise, if we have a target we can use, it is best to
7012 store directly into the target unless the type is large enough
7013 that memcpy will be used. If we are making an initializer and
7014 all operands are constant, put it in memory as well.
7016 FIXME: Avoid trying to fill vector constructors piece-meal.
7017 Output them with output_constant_def below unless we're sure
7018 they're zeros. This should go away when vector initializers
7019 are treated like VECTOR_CST instead of arrays.
7020 */
7021 else if ((TREE_STATIC (exp)
7022 && ((mode == BLKmode
7023 && ! (target != 0 && safe_from_p (target, exp, 1)))
7024 || TREE_ADDRESSABLE (exp)
7025 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7026 && (! MOVE_BY_PIECES_P
7027 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7028 TYPE_ALIGN (type)))
7029 && ((TREE_CODE (type) == VECTOR_TYPE
7030 && !is_zeros_p (exp))
7031 || ! mostly_zeros_p (exp)))))
7032 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
7033 {
7034 rtx constructor = output_constant_def (exp, 1);
7036 if (modifier != EXPAND_CONST_ADDRESS
7037 && modifier != EXPAND_INITIALIZER
7038 && modifier != EXPAND_SUM)
7039 constructor = validize_mem (constructor);
7041 return constructor;
7042 }
7043 else
7044 {
7045 /* Handle calls that pass values in multiple non-contiguous
7046 locations. The Irix 6 ABI has examples of this. */
7047 if (target == 0 || ! safe_from_p (target, exp, 1)
7048 || GET_CODE (target) == PARALLEL)
7049 target
7050 = assign_temp (build_qualified_type (type,
7051 (TYPE_QUALS (type)
7052 | (TREE_READONLY (exp)
7053 * TYPE_QUAL_CONST))),
7054 0, TREE_ADDRESSABLE (exp), 1);
7056 store_constructor (exp, target, 0, int_expr_size (exp));
7057 return target;
7058 }
7060 case INDIRECT_REF:
7061 {
7062 tree exp1 = TREE_OPERAND (exp, 0);
7063 tree index;
7064 tree string = string_constant (exp1, &index);
7066 /* Try to optimize reads from const strings.  */
7067 if (string
7068 && TREE_CODE (string) == STRING_CST
7069 && TREE_CODE (index) == INTEGER_CST
7070 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
7071 && GET_MODE_CLASS (mode) == MODE_INT
7072 && GET_MODE_SIZE (mode) == 1
7073 && modifier != EXPAND_WRITE)
7074 return gen_int_mode (TREE_STRING_POINTER (string)
7075 [TREE_INT_CST_LOW (index)], mode);
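/* Added note (not in the original source): the optimization above folds
   a read such as *("abc" + 1) to (const_int 98), the character 'b',
   avoiding any memory reference.  */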
7077 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
7078 op0 = memory_address (mode, op0);
7079 temp = gen_rtx_MEM (mode, op0);
7080 set_mem_attributes (temp, exp, 0);
7082 /* If we are writing to this object and its type is a record with
7083 readonly fields, we must mark it as readonly so it will
7084 conflict with readonly references to those fields. */
7085 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
7086 RTX_UNCHANGING_P (temp) = 1;
7088 return temp;
7089 }
7091 case ARRAY_REF:
7092 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
7093 abort ();
7095 {
7096 tree array = TREE_OPERAND (exp, 0);
7097 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
7098 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
7099 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
7102 /* Optimize the special-case of a zero lower bound.
7104 We convert the low_bound to sizetype to avoid some problems
7105 with constant folding. (E.g. suppose the lower bound is 1,
7106 and its mode is QI. Without the conversion, (ARRAY
7107 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
7108 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
7110 if (! integer_zerop (low_bound))
7111 index = size_diffop (index, convert (sizetype, low_bound));
7113 /* Fold an expression like: "foo"[2].
7114 This is not done in fold so it won't happen inside &.
7115 Don't fold if this is for wide characters since it's too
7116 difficult to do correctly and this is a very rare case. */
7118 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
7119 && TREE_CODE (array) == STRING_CST
7120 && TREE_CODE (index) == INTEGER_CST
7121 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
7122 && GET_MODE_CLASS (mode) == MODE_INT
7123 && GET_MODE_SIZE (mode) == 1)
7124 return gen_int_mode (TREE_STRING_POINTER (array)
7125 [TREE_INT_CST_LOW (index)], mode);
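/* Added note (not in the original source): this fold turns "foo"[2]
   into (const_int 111), the character 'o', with no load emitted.  */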
7127 /* If this is a constant index into a constant array,
7128 just get the value from the array. Handle both the cases when
7129 we have an explicit constructor and when our operand is a variable
7130 that was declared const. */
7132 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
7133 && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
7134 && TREE_CODE (index) == INTEGER_CST
7135 && 0 > compare_tree_int (index,
7136 list_length (CONSTRUCTOR_ELTS
7137 (TREE_OPERAND (exp, 0)))))
7138 {
7139 tree elem;
7140 unsigned HOST_WIDE_INT i;
7141 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7142 i = TREE_INT_CST_LOW (index);
7143 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
7144 ;
7146 if (elem)
7147 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
7148 modifier);
7149 }
7151 else if (optimize >= 1
7152 && modifier != EXPAND_CONST_ADDRESS
7153 && modifier != EXPAND_INITIALIZER
7154 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7155 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7156 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
7157 {
7158 if (TREE_CODE (index) == INTEGER_CST)
7159 {
7160 tree init = DECL_INITIAL (array);
7162 if (TREE_CODE (init) == CONSTRUCTOR)
7163 {
7164 tree elem;
7166 for (elem = CONSTRUCTOR_ELTS (init);
7167 (elem
7168 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
7169 elem = TREE_CHAIN (elem))
7170 ;
7172 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
7173 return expand_expr (fold (TREE_VALUE (elem)), target,
7174 tmode, modifier);
7175 }
7176 else if (TREE_CODE (init) == STRING_CST
7177 && 0 > compare_tree_int (index,
7178 TREE_STRING_LENGTH (init)))
7179 {
7180 tree type = TREE_TYPE (TREE_TYPE (init));
7181 enum machine_mode mode = TYPE_MODE (type);
7183 if (GET_MODE_CLASS (mode) == MODE_INT
7184 && GET_MODE_SIZE (mode) == 1)
7185 return gen_int_mode (TREE_STRING_POINTER (init)
7186 [TREE_INT_CST_LOW (index)], mode);
7187 }
7188 }
7189 }
7190 }
7191 /* Fall through.  */
7193 case COMPONENT_REF:
7194 case BIT_FIELD_REF:
7195 case ARRAY_RANGE_REF:
7196 /* If the operand is a CONSTRUCTOR, we can just extract the
7197 appropriate field if it is present. Don't do this if we have
7198 already written the data since we want to refer to that copy
7199 and varasm.c assumes that's what we'll do. */
7200 if (code == COMPONENT_REF
7201 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
7202 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
7203 {
7204 tree elt;
7206 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
7207 elt = TREE_CHAIN (elt))
7208 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
7209 /* We can normally use the value of the field in the
7210 CONSTRUCTOR. However, if this is a bitfield in
7211 an integral mode that we can fit in a HOST_WIDE_INT,
7212 we must mask only the number of bits in the bitfield,
7213 since this is done implicitly by the constructor. If
7214 the bitfield does not meet either of those conditions,
7215 we can't do this optimization. */
7216 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
7217 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
7218 == MODE_INT)
7219 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
7220 <= HOST_BITS_PER_WIDE_INT))))
7221 {
7222 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
7223 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
7224 {
7225 HOST_WIDE_INT bitsize
7226 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
7227 enum machine_mode imode
7228 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
7230 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
7231 {
7232 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7233 op0 = expand_and (imode, op0, op1, target);
7234 }
7235 else
7236 {
7237 tree count
7238 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
7239 0);
7241 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7242 target, 0);
7243 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7244 target, 0);
7245 }
7246 }
7248 return op0;
7249 }
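/* Added note (not in the original source): for a signed 5-bit field in
   SImode the value is shifted left and then arithmetically right by
   32 - 5 == 27 bits, which sign-extends it; an unsigned field is instead
   masked with (1 << 5) - 1.  */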
7250 }
7252 {
7253 enum machine_mode mode1;
7254 HOST_WIDE_INT bitsize, bitpos;
7255 tree offset;
7256 int volatilep = 0;
7257 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7258 &mode1, &unsignedp, &volatilep);
7259 rtx orig_op0;
7261 /* If we got back the original object, something is wrong. Perhaps
7262 we are evaluating an expression too early. In any event, don't
7263 infinitely recurse.  */
7264 if (tem == exp)
7265 abort ();
7267 /* If TEM's type is a union of variable size, pass TARGET to the inner
7268 computation, since it will need a temporary and TARGET is known
7269 to suffice.  This occurs in unchecked conversion in Ada.  */
7271 orig_op0 = op0
7272 = expand_expr (tem,
7273 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7274 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7275 != INTEGER_CST)
7276 ? target : NULL_RTX),
7277 VOIDmode,
7278 (modifier == EXPAND_INITIALIZER
7279 || modifier == EXPAND_CONST_ADDRESS)
7280 ? modifier : EXPAND_NORMAL);
7282 /* If this is a constant, put it into a register if it is a
7283 legitimate constant and OFFSET is 0 and memory if it isn't. */
7284 if (CONSTANT_P (op0))
7285 {
7286 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7287 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7288 && offset == 0)
7289 op0 = force_reg (mode, op0);
7290 else
7291 op0 = validize_mem (force_const_mem (mode, op0));
7292 }
7294 if (offset != 0)
7295 {
7296 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
7298 /* If this object is in a register, put it into memory.
7299 This case can't occur in C, but can in Ada if we have
7300 unchecked conversion of an expression from a scalar type to
7301 an array or record type. */
7302 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7303 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
7304 {
7305 /* If the operand is a SAVE_EXPR, we can deal with this by
7306 forcing the SAVE_EXPR into memory.  */
7307 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7308 {
7309 put_var_into_stack (TREE_OPERAND (exp, 0));
7310 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7311 }
7312 else
7313 {
7314 tree nt
7315 = build_qualified_type (TREE_TYPE (tem),
7316 (TYPE_QUALS (TREE_TYPE (tem))
7317 | TYPE_QUAL_CONST));
7318 rtx memloc = assign_temp (nt, 1, 1, 1);
7320 emit_move_insn (memloc, op0);
7321 op0 = memloc;
7322 }
7323 }
7325 if (GET_CODE (op0) != MEM)
7326 abort ();
7328 #ifdef POINTERS_EXTEND_UNSIGNED
7329 if (GET_MODE (offset_rtx) != Pmode)
7330 offset_rtx = convert_memory_address (Pmode, offset_rtx);
7331 #else
7332 if (GET_MODE (offset_rtx) != ptr_mode)
7333 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7334 #endif
7336 /* A constant address in OP0 can have VOIDmode, we must not try
7337 to call force_reg for that case. Avoid that case. */
7338 if (GET_CODE (op0) == MEM
7339 && GET_MODE (op0) == BLKmode
7340 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7341 && bitsize != 0
7342 && (bitpos % bitsize) == 0
7343 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7344 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7345 {
7346 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7347 bitpos = 0;
7348 }
7350 op0 = offset_address (op0, offset_rtx,
7351 highest_pow2_factor (offset));
7354 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7355 record its alignment as BIGGEST_ALIGNMENT. */
7356 if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
7357 && is_aligning_offset (offset, tem))
7358 set_mem_align (op0, BIGGEST_ALIGNMENT);
7360 /* Don't forget about volatility even if this is a bitfield. */
7361 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7362 {
7363 if (op0 == orig_op0)
7364 op0 = copy_rtx (op0);
7366 MEM_VOLATILE_P (op0) = 1;
7367 }
7369 /* The following code doesn't handle CONCAT.
7370 Assume only bitpos == 0 can be used for CONCAT, due to
7371 one element arrays having the same mode as its element. */
7372 if (GET_CODE (op0) == CONCAT)
7373 {
7374 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
7375 abort ();
7376 return op0;
7377 }
7379 /* In cases where an aligned union has an unaligned object
7380 as a field, we might be extracting a BLKmode value from
7381 an integer-mode (e.g., SImode) object. Handle this case
7382 by doing the extract into an object as wide as the field
7383 (which we know to be the width of a basic mode), then
7384 storing into memory, and changing the mode to BLKmode. */
7385 if (mode1 == VOIDmode
7386 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7387 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7388 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7389 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7390 && modifier != EXPAND_CONST_ADDRESS
7391 && modifier != EXPAND_INITIALIZER)
7392 /* If the field isn't aligned enough to fetch as a memref,
7393 fetch it as a bit field. */
7394 || (mode1 != BLKmode
7395 && SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))
7396 && ((TYPE_ALIGN (TREE_TYPE (tem))
7397 < GET_MODE_ALIGNMENT (mode))
7398 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
7399 /* If the type and the field are a constant size and the
7400 size of the type isn't the same size as the bitfield,
7401 we must use bitfield operations. */
7402 || (bitsize >= 0
7403 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7404 == INTEGER_CST)
7405 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7406 bitsize)))
7407 {
7408 enum machine_mode ext_mode = mode;
7410 if (ext_mode == BLKmode
7411 && ! (target != 0 && GET_CODE (op0) == MEM
7412 && GET_CODE (target) == MEM
7413 && bitpos % BITS_PER_UNIT == 0))
7414 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7416 if (ext_mode == BLKmode)
7417 {
7418 /* In this case, BITPOS must start at a byte boundary and
7419 TARGET, if specified, must be a MEM.  */
7420 if (GET_CODE (op0) != MEM
7421 || (target != 0 && GET_CODE (target) != MEM)
7422 || bitpos % BITS_PER_UNIT != 0)
7423 abort ();
7425 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
7426 if (target == 0)
7427 target = assign_temp (type, 0, 1, 1);
7429 emit_block_move (target, op0,
7430 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7431 / BITS_PER_UNIT),
7432 BLOCK_OP_NORMAL);
7434 return target;
7435 }
7437 op0 = validize_mem (op0);
7439 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7440 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7442 op0 = extract_bit_field (op0, bitsize, bitpos,
7443 unsignedp, target, ext_mode, ext_mode,
7444 int_size_in_bytes (TREE_TYPE (tem)));
7446 /* If the result is a record type and BITSIZE is narrower than
7447 the mode of OP0, an integral mode, and this is a big endian
7448 machine, we must put the field into the high-order bits. */
7449 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7450 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7451 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7452 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7453 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7454 - bitsize),
7455 op0, 1);
7457 if (mode == BLKmode)
7458 {
7459 rtx new = assign_temp (build_qualified_type
7460 ((*lang_hooks.types.type_for_mode)
7461 (ext_mode, 0),
7462 TYPE_QUAL_CONST), 0, 1, 1);
7464 emit_move_insn (new, op0);
7465 op0 = copy_rtx (new);
7466 PUT_MODE (op0, BLKmode);
7467 set_mem_attributes (op0, exp, 1);
7468 }
7470 return op0;
7471 }
7473 /* If the result is BLKmode, use that to access the object
7474 now as well.  */
7475 if (mode == BLKmode)
7476 mode1 = BLKmode;
7478 /* Get a reference to just this component. */
7479 if (modifier == EXPAND_CONST_ADDRESS
7480 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7481 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7482 else
7483 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7485 if (op0 == orig_op0)
7486 op0 = copy_rtx (op0);
7488 set_mem_attributes (op0, exp, 0);
7489 if (GET_CODE (XEXP (op0, 0)) == REG)
7490 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7492 MEM_VOLATILE_P (op0) |= volatilep;
7493 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7494 || modifier == EXPAND_CONST_ADDRESS
7495 || modifier == EXPAND_INITIALIZER)
7496 return op0;
7497 else if (target == 0)
7498 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7500 convert_move (target, op0, unsignedp);
7501 return target;
7502 }
7504 case VTABLE_REF:
7505 {
7506 rtx insn, before = get_last_insn (), vtbl_ref;
7508 /* Evaluate the interior expression. */
7509 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7510 tmode, modifier);
7512 /* Get or create an instruction off which to hang a note. */
7513 if (REG_P (subtarget))
7514 {
7515 target = subtarget;
7516 insn = get_last_insn ();
7517 if (insn == before)
7518 abort ();
7519 if (! INSN_P (insn))
7520 insn = prev_nonnote_insn (insn);
7521 }
7522 else
7523 {
7524 target = gen_reg_rtx (GET_MODE (subtarget));
7525 insn = emit_move_insn (target, subtarget);
7526 }
7528 /* Collect the data for the note. */
7529 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7530 vtbl_ref = plus_constant (vtbl_ref,
7531 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7532 /* Discard the initial CONST that was added. */
7533 vtbl_ref = XEXP (vtbl_ref, 0);
7535 REG_NOTES (insn)
7536 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7538 return target;
7539 }
7541 /* Intended for a reference to a buffer of a file-object in Pascal.
7542 But it's not certain that a special tree code will really be
7543 necessary for these.  INDIRECT_REF might work for them.  */
7544 case BUFFER_REF:
7545 abort ();
7547 case IN_EXPR:
7548 {
7549 /* Pascal set IN expression.
7551 Algorithm:
7552 rlo = set_low - (set_low%bits_per_word);
7553 the_word = set [ (index - rlo)/bits_per_word ];
7554 bit_index = index % bits_per_word;
7555 bitmask = 1 << bit_index;
7556 return !!(the_word & bitmask); */
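/* Added note (not in the original source): with byte-sized set words as
   used below, "10 IN s" for a set over 0..31 tests bit 10 % 8 == 2 of
   byte 10 / 8 == 1 of the set object.  */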
7558 tree set = TREE_OPERAND (exp, 0);
7559 tree index = TREE_OPERAND (exp, 1);
7560 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7561 tree set_type = TREE_TYPE (set);
7562 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7563 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7564 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7565 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7566 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7567 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7568 rtx setaddr = XEXP (setval, 0);
7569 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7570 rtx rlow;
7571 rtx diff, quo, rem, addr, bit, result;
7573 /* If domain is empty, answer is no. Likewise if index is constant
7574 and out of bounds. */
7575 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7576 && TREE_CODE (set_low_bound) == INTEGER_CST
7577 && tree_int_cst_lt (set_high_bound, set_low_bound))
7578 || (TREE_CODE (index) == INTEGER_CST
7579 && TREE_CODE (set_low_bound) == INTEGER_CST
7580 && tree_int_cst_lt (index, set_low_bound))
7581 || (TREE_CODE (set_high_bound) == INTEGER_CST
7582 && TREE_CODE (index) == INTEGER_CST
7583 && tree_int_cst_lt (set_high_bound, index))))
7584 return const0_rtx;
7586 if (target == 0)
7587 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7589 /* If we get here, we have to generate the code for both cases
7590 (in range and out of range). */
7592 op0 = gen_label_rtx ();
7593 op1 = gen_label_rtx ();
7595 if (! (GET_CODE (index_val) == CONST_INT
7596 && GET_CODE (lo_r) == CONST_INT))
7597 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7598 GET_MODE (index_val), iunsignedp, op1);
7600 if (! (GET_CODE (index_val) == CONST_INT
7601 && GET_CODE (hi_r) == CONST_INT))
7602 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7603 GET_MODE (index_val), iunsignedp, op1);
7605 /* Calculate the element number of bit zero in the first word
7606 of the set.  */
7607 if (GET_CODE (lo_r) == CONST_INT)
7608 rlow = GEN_INT (INTVAL (lo_r)
7609 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7610 else
7611 rlow = expand_binop (index_mode, and_optab, lo_r,
7612 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7613 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7615 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7616 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7618 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7619 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7620 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7621 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7623 addr = memory_address (byte_mode,
7624 expand_binop (index_mode, add_optab, diff,
7625 setaddr, NULL_RTX, iunsignedp,
7626 OPTAB_LIB_WIDEN));
7628 /* Extract the bit we want to examine. */
7629 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7630 gen_rtx_MEM (byte_mode, addr),
7631 make_tree (TREE_TYPE (index), rem),
7632 NULL_RTX, iunsignedp);
7633 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7634 GET_MODE (target) == byte_mode ? target : 0,
7635 1, OPTAB_LIB_WIDEN);
7637 if (result != target)
7638 convert_move (target, result, 1);
7640 /* Output the code to handle the out-of-range case.  */
7641 emit_jump (op0);
7642 emit_label (op1);
7643 emit_move_insn (target, const0_rtx);
7644 emit_label (op0);
7645 return target;
7646 }
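/* Added note (not in the original source): both the in-range and the
   out-of-range paths join at the final label, so TARGET always ends up
   holding an ordinary 0/1 truth value.  */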
7648 case WITH_CLEANUP_EXPR:
7649 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7650 {
7651 WITH_CLEANUP_EXPR_RTL (exp)
7652 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7653 expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
7654 CLEANUP_EH_ONLY (exp));
7656 /* That's it for this cleanup. */
7657 TREE_OPERAND (exp, 1) = 0;
7658 }
7659 return WITH_CLEANUP_EXPR_RTL (exp);
7661 case CLEANUP_POINT_EXPR:
7662 {
7663 /* Start a new binding layer that will keep track of all cleanup
7664 actions to be performed. */
7665 expand_start_bindings (2);
7667 target_temp_slot_level = temp_slot_level;
7669 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7670 /* If we're going to use this value, load it up now.  */
7671 if (! ignore)
7672 op0 = force_not_mem (op0);
7673 preserve_temp_slots (op0);
7674 expand_end_bindings (NULL_TREE, 0, 0);
7675 }
7676 return op0;
7678 case CALL_EXPR:
7679 /* Check for a built-in function. */
7680 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7681 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7682 == FUNCTION_DECL)
7683 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7684 {
7685 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7686 == BUILT_IN_FRONTEND)
7687 return (*lang_hooks.expand_expr)
7688 (exp, original_target, tmode, modifier);
7689 else
7690 return expand_builtin (exp, target, subtarget, tmode, ignore);
7691 }
7693 return expand_call (exp, target, ignore);
7695 case NON_LVALUE_EXPR:
7696 case NOP_EXPR:
7697 case CONVERT_EXPR:
7698 case REFERENCE_EXPR:
7699 if (TREE_OPERAND (exp, 0) == error_mark_node)
7700 return const0_rtx;
7702 if (TREE_CODE (type) == UNION_TYPE)
7703 {
7704 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7706 /* If both input and output are BLKmode, this conversion isn't doing
7707 anything except possibly changing memory attribute. */
7708 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7709 {
7710 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7711 modifier);
7713 result = copy_rtx (result);
7714 set_mem_attributes (result, exp, 0);
7715 return result;
7716 }
7718 if (target == 0)
7719 target = assign_temp (type, 0, 1, 1);
7721 if (GET_CODE (target) == MEM)
7722 /* Store data into beginning of memory target. */
7723 store_expr (TREE_OPERAND (exp, 0),
7724 adjust_address (target, TYPE_MODE (valtype), 0), 0);
7726 else if (GET_CODE (target) == REG)
7727 /* Store this field into a union of the proper type. */
7728 store_field (target,
7729 MIN ((int_size_in_bytes (TREE_TYPE
7730 (TREE_OPERAND (exp, 0)))
7731 * BITS_PER_UNIT),
7732 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7733 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7734 VOIDmode, 0, type, 0);
7735 else
7736 abort ();
7738 /* Return the entire union.  */
7739 return target;
7740 }
7742 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7743 {
7744 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7745 modifier);
7747 /* If the signedness of the conversion differs and OP0 is
7748 a promoted SUBREG, clear that indication since we now
7749 have to do the proper extension. */
7750 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7751 && GET_CODE (op0) == SUBREG)
7752 SUBREG_PROMOTED_VAR_P (op0) = 0;
7754 return op0;
7755 }
7757 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7758 if (GET_MODE (op0) == mode)
7759 return op0;
7761 /* If OP0 is a constant, just convert it into the proper mode.  */
7762 if (CONSTANT_P (op0))
7763 {
7764 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7765 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7767 if (modifier == EXPAND_INITIALIZER)
7768 return simplify_gen_subreg (mode, op0, inner_mode,
7769 subreg_lowpart_offset (mode,
7770 inner_mode));
7771 else
7772 return convert_modes (mode, inner_mode, op0,
7773 TREE_UNSIGNED (inner_type));
7774 }
7776 if (modifier == EXPAND_INITIALIZER)
7777 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7779 if (target == 0)
7780 return
7781 convert_to_mode (mode, op0,
7782 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7783 else
7784 convert_move (target, op0,
7785 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7786 return target;
7788 case VIEW_CONVERT_EXPR:
7789 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7791 /* If the input and output modes are both the same, we are done.
7792 Otherwise, if neither mode is BLKmode and both are within a word, we
7793 can use gen_lowpart. If neither is true, make sure the operand is
7794 in memory and convert the MEM to the new mode. */
7795 if (TYPE_MODE (type) == GET_MODE (op0))
7796 ;
7797 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7798 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7799 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7800 op0 = gen_lowpart (TYPE_MODE (type), op0);
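/* Added note (not in the original source): this is the path that
   reinterprets bits without conversion, e.g. viewing an SFmode value as
   SImode; gen_lowpart merely relabels the same word-sized value.  */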
7801 else if (GET_CODE (op0) != MEM)
7802 {
7803 /* If the operand is not a MEM, force it into memory.  Since we
7804 are going to be changing the mode of the MEM, don't call
7805 force_const_mem for constants because we don't allow pool
7806 constants to change mode.  */
7807 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7809 if (TREE_ADDRESSABLE (exp))
7810 abort ();
7812 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7813 target
7814 = assign_stack_temp_for_type
7815 (TYPE_MODE (inner_type),
7816 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7818 emit_move_insn (target, op0);
7819 op0 = target;
7820 }
7822 /* At this point, OP0 is in the correct mode. If the output type is such
7823 that the operand is known to be aligned, indicate that it is.
7824 Otherwise, we need only be concerned about alignment for non-BLKmode
7825 results.  */
7826 if (GET_CODE (op0) == MEM)
7827 {
7828 op0 = copy_rtx (op0);
7830 if (TYPE_ALIGN_OK (type))
7831 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7832 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7833 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7834 {
7835 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7836 HOST_WIDE_INT temp_size
7837 = MAX (int_size_in_bytes (inner_type),
7838 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7839 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7840 temp_size, 0, type);
7841 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7843 if (TREE_ADDRESSABLE (exp))
7844 abort ();
7846 if (GET_MODE (op0) == BLKmode)
7847 emit_block_move (new_with_op0_mode, op0,
7848 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7849 BLOCK_OP_NORMAL);
7850 else
7851 emit_move_insn (new_with_op0_mode, op0);
7853 op0 = new;
7854 }
7856 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7857 }
7859 return op0;
7861 case PLUS_EXPR:
7862 this_optab = ! unsignedp && flag_trapv
7863 && (GET_MODE_CLASS (mode) == MODE_INT)
7864 ? addv_optab : add_optab;
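/* Added note (not in the original source): with -ftrapv, signed integer
   addition is routed through addv_optab so that overflow traps instead
   of wrapping silently.  */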
7866 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7867 something else, make sure we add the register to the constant and
7868 then to the other thing. This case can occur during strength
7869 reduction and doing it this way will produce better code if the
7870 frame pointer or argument pointer is eliminated.
7872 fold-const.c will ensure that the constant is always in the inner
7873 PLUS_EXPR, so the only case we need to do anything about is if
7874 sp, ap, or fp is our second argument, in which case we must swap
7875 the innermost first argument and our second argument. */
7877 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7878 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7879 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7880 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7881 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7882 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7883 {
7884 tree t = TREE_OPERAND (exp, 1);
7886 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7887 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7888 }
7890 /* If the result is to be ptr_mode and we are adding an integer to
7891 something, we might be forming a constant. So try to use
7892 plus_constant. If it produces a sum and we can't accept it,
7893 use force_operand. This allows P = &ARR[const] to generate
7894 efficient code on machines where a SYMBOL_REF is not a valid
7895 address.
7897 If this is an EXPAND_SUM call, always return the sum. */
7898 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7899 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7900 {
7901 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7902 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7903 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7904 {
7905 rtx constant_part;
7907 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7908 EXPAND_SUM);
7909 /* Use immed_double_const to ensure that the constant is
7910 truncated according to the mode of OP1, then sign extended
7911 to a HOST_WIDE_INT. Using the constant directly can result
7912 in non-canonical RTL in a 64x32 cross compile.  */
7913 constant_part
7914 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7915 (HOST_WIDE_INT) 0,
7916 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7917 op1 = plus_constant (op1, INTVAL (constant_part));
7918 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7919 op1 = force_operand (op1, target);
7921 return op1;
7922 }
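/* Added note (not in the original source): this branch is what lets
   P = &ARR[3] fold to (const (plus (symbol_ref ARR) (const_int 12)))
   for 4-byte elements, with no runtime addition.  */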
7923 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7924 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7925 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7926 {
7927 rtx constant_part;
7929 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7930 (modifier == EXPAND_INITIALIZER
7931 ? EXPAND_INITIALIZER : EXPAND_SUM));
7932 if (! CONSTANT_P (op0))
7933 {
7934 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7935 VOIDmode, modifier);
7936 /* Don't go to both_summands if modifier
7937 says it's not right to return a PLUS. */
7938 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7939 goto binop2;
7940 goto both_summands;
7941 }
7942 /* Use immed_double_const to ensure that the constant is
7943 truncated according to the mode of OP1, then sign extended
7944 to a HOST_WIDE_INT. Using the constant directly can result
7945 in non-canonical RTL in a 64x32 cross compile.  */
7946 constant_part
7947 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7948 (HOST_WIDE_INT) 0,
7949 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7950 op0 = plus_constant (op0, INTVAL (constant_part));
7951 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7952 op0 = force_operand (op0, target);
7954 return op0;
7955 }
7956 }
7957 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7958 subtarget = 0;
7960 /* No sense saving up arithmetic to be done
7961 if it's all in the wrong mode to form part of an address.
7962 And force_operand won't know whether to sign-extend or
7963 zero-extend.  */
7964 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7965 || mode != ptr_mode)
7966 {
7967 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7968 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7969 if (op0 == const0_rtx)
7970 return op1;
7971 if (op1 == const0_rtx)
7972 return op0;
7973 goto binop2;
7974 }
7976 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
7977 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
7979 /* We come here from MINUS_EXPR when the second operand is a
7980 constant.  */
7981 both_summands:
7982 /* Make sure any term that's a sum with a constant comes last. */
7983 if (GET_CODE (op0) == PLUS
7984 && CONSTANT_P (XEXP (op0, 1)))
7985 {
7986 temp = op0;
7987 op0 = op1;
7988 op1 = temp;
7989 }
7990 /* If adding to a sum including a constant,
7991 associate it to put the constant outside. */
7992 if (GET_CODE (op1) == PLUS
7993 && CONSTANT_P (XEXP (op1, 1)))
7994 {
7995 rtx constant_term = const0_rtx;
7997 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7998 if (temp != 0)
7999 op0 = temp;
8000 /* Ensure that MULT comes first if there is one. */
8001 else if (GET_CODE (op0) == MULT)
8002 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
8003 else
8004 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
8006 /* Let's also eliminate constants from op0 if possible. */
8007 op0 = eliminate_constant_term (op0, &constant_term);
8009 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
8010 their sum should be a constant. Form it into OP1, since the
8011 result we want will then be OP0 + OP1. */
8013 temp = simplify_binary_operation (PLUS, mode, constant_term,
8014 XEXP (op1, 1));
8015 if (temp != 0)
8016 op1 = temp;
8017 else
8018 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
8019 }
8021 /* Put a constant term last and put a multiplication first. */
8022 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
8023 temp = op1, op1 = op0, op0 = temp;
8025 temp = simplify_binary_operation (PLUS, mode, op0, op1);
8026 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
8028 case MINUS_EXPR:
8029 /* For initializers, we are allowed to return a MINUS of two
8030 symbolic constants.  Here we handle all cases when both operands
8031 are constant.  */
8032 /* Handle difference of two symbolic constants,
8033 for the sake of an initializer. */
8034 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8035 && really_constant_p (TREE_OPERAND (exp, 0))
8036 && really_constant_p (TREE_OPERAND (exp, 1)))
8037 {
8038 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
8039 modifier);
8040 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
8041 modifier);
8043 /* If the last operand is a CONST_INT, use plus_constant of
8044 the negated constant. Else make the MINUS. */
8045 if (GET_CODE (op1) == CONST_INT)
8046 return plus_constant (op0, - INTVAL (op1));
8047 else
8048 return gen_rtx_MINUS (mode, op0, op1);
8049 }
8051 this_optab = ! unsignedp && flag_trapv
8052 && (GET_MODE_CLASS(mode) == MODE_INT)
8053 ? subv_optab : sub_optab;
8055 /* No sense saving up arithmetic to be done
8056 if it's all in the wrong mode to form part of an address.
8057 And force_operand won't know whether to sign-extend or
8058 zero-extend.  */
8059 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8060 || mode != ptr_mode)
8061 goto binop;
8063 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8064 subtarget = 0;
8066 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
8067 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
8069 /* Convert A - const to A + (-const). */
8070 if (GET_CODE (op1) == CONST_INT)
8071 {
8072 op1 = negate_rtx (mode, op1);
8073 goto both_summands;
8074 }
8076 goto binop2;
8078 case MULT_EXPR:
8079 /* If first operand is constant, swap them.
8080 Thus the following special case checks need only
8081 check the second operand. */
8082 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
8083 {
8084 tree t1 = TREE_OPERAND (exp, 0);
8085 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
8086 TREE_OPERAND (exp, 1) = t1;
8087 }
8089 /* Attempt to return something suitable for generating an
8090 indexed address, for machines that support that. */
8092 if (modifier == EXPAND_SUM && mode == ptr_mode
8093 && host_integerp (TREE_OPERAND (exp, 1), 0))
8094 {
8095 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8096 EXPAND_SUM);
8098 /* If we knew for certain that this is arithmetic for an array
8099 reference, and we knew the bounds of the array, then we could
8100 apply the distributive law across (PLUS X C) for constant C.
8101 Without such knowledge, we risk overflowing the computation
8102 when both X and C are large, but X+C isn't. */
8103 /* ??? Could perhaps special-case EXP being unsigned and C being
8104 positive. In that case we are certain that X+C is no smaller
8105 than X and so the transformed expression will overflow iff the
8106 original would have. */
8108 if (GET_CODE (op0) != REG)
8109 op0 = force_operand (op0, NULL_RTX);
8110 if (GET_CODE (op0) != REG)
8111 op0 = copy_to_mode_reg (mode, op0);
8113 return
8114 gen_rtx_MULT (mode, op0,
8115 GEN_INT (tree_low_cst (TREE_OPERAND (exp, 1), 0)));
8116 }
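/* Added note (not in the original source): returning a bare
   (mult (reg) (const_int N)) lets the caller combine it into a scaled
   indexed address on machines whose addressing modes support that.  */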
8118 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8119 subtarget = 0;
8121 /* Check for multiplying things that have been extended
8122 from a narrower type. If this machine supports multiplying
8123 in that narrower type with a result in the desired type,
8124 do it that way, and avoid the explicit type-conversion. */
8125 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
8126 && TREE_CODE (type) == INTEGER_TYPE
8127 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8128 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8129 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8130 && int_fits_type_p (TREE_OPERAND (exp, 1),
8131 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8132 /* Don't use a widening multiply if a shift will do. */
8133 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8134 > HOST_BITS_PER_WIDE_INT)
8135 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
8136 ||
8137 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8138 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8139 ==
8140 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
8141 /* If both operands are extended, they must either both
8142 be zero-extended or both be sign-extended. */
8143 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8144 ==
8145 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
8146 {
8147 enum machine_mode innermode
8148 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
8149 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8150 ? smul_widen_optab : umul_widen_optab);
8151 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8152 ? umul_widen_optab : smul_widen_optab);
8153 if (mode == GET_MODE_WIDER_MODE (innermode))
8154 {
8155 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
8156 {
8157 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8158 NULL_RTX, VOIDmode, 0);
8159 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8160 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8161 VOIDmode, 0);
8162 else
8163 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8164 NULL_RTX, VOIDmode, 0);
8165 goto binop2;
8166 }
8167 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
8168 && innermode == word_mode)
8169 {
8170 rtx htem;
8171 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8172 NULL_RTX, VOIDmode, 0);
8173 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8174 op1 = convert_modes (innermode, mode,
8175 expand_expr (TREE_OPERAND (exp, 1),
8176 NULL_RTX, VOIDmode, 0),
8177 unsignedp);
8178 else
8179 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8180 NULL_RTX, VOIDmode, 0);
8181 temp = expand_binop (mode, other_optab, op0, op1, target,
8182 unsignedp, OPTAB_LIB_WIDEN);
8183 htem = expand_mult_highpart_adjust (innermode,
8184 gen_highpart (innermode, temp),
8185 op0, op1,
8186 gen_highpart (innermode, temp),
8187 unsignedp);
8188 emit_move_insn (gen_highpart (innermode, temp), htem);
8189 return temp;
8190 }
8191 }
8193 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8194 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8195 return expand_mult (mode, op0, op1, target, unsignedp);
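/* Added note (not in the original source): the widening checks above
   allow (int) ((short) a * (short) b) to use a single widening multiply
   pattern instead of extending both operands first.  */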
8197 case TRUNC_DIV_EXPR:
8198 case FLOOR_DIV_EXPR:
8199 case CEIL_DIV_EXPR:
8200 case ROUND_DIV_EXPR:
8201 case EXACT_DIV_EXPR:
8202 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8203 subtarget = 0;
8204 /* Possible optimization: compute the dividend with EXPAND_SUM
8205 then if the divisor is constant can optimize the case
8206 where some terms of the dividend have coeffs divisible by it. */
8207 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8208 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8209 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8210
8211 case RDIV_EXPR:
8212 /* Emit a/b as a*(1/b). Later we may manage to CSE the reciprocal,
8213 saving an expensive divide. If not, combine will rebuild the
8214 original computation. */
8215 if (flag_unsafe_math_optimizations && optimize && !optimize_size
8216 && TREE_CODE (type) == REAL_TYPE
8217 && !real_onep (TREE_OPERAND (exp, 0)))
8218 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
8219 build (RDIV_EXPR, type,
8220 build_real (type, dconst1),
8221 TREE_OPERAND (exp, 1))),
8222 target, tmode, unsignedp);
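/* For example, with -funsafe-math-optimizations,
       double f (double a, double b, double c, double d)
       { return a / d + b / d + c / d; }
   has each division rewritten as a multiplication by (1.0 / d); CSE
   can then share one reciprocal, leaving a single divide and three
   cheap multiplies.  The real_onep test above keeps 1.0 / y itself
   from being rewritten into 1.0 * (1.0 / y) without end.  */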
8223 this_optab = sdiv_optab;
8224 goto binop;
8225
8226 case TRUNC_MOD_EXPR:
8227 case FLOOR_MOD_EXPR:
8228 case CEIL_MOD_EXPR:
8229 case ROUND_MOD_EXPR:
8230 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8231 subtarget = 0;
8232 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8233 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8234 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8236 case FIX_ROUND_EXPR:
8237 case FIX_FLOOR_EXPR:
8238 case FIX_CEIL_EXPR:
8239 abort (); /* Not used for C. */
8241 case FIX_TRUNC_EXPR:
8242 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8243 if (target == 0)
8244 target = gen_reg_rtx (mode);
8245 expand_fix (target, op0, unsignedp);
8246 return target;
8247
8248 case FLOAT_EXPR:
8249 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8250 if (target == 0)
8251 target = gen_reg_rtx (mode);
8252 /* expand_float can't figure out what to do if FROM has VOIDmode.
8253 So give it the correct mode. With -O, cse will optimize this. */
8254 if (GET_MODE (op0) == VOIDmode)
8255 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8256 op0);
8257 expand_float (target, op0,
8258 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8259 return target;
8260
8261 case NEGATE_EXPR:
8262 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8263 temp = expand_unop (mode,
8264 ! unsignedp && flag_trapv
8265 && (GET_MODE_CLASS(mode) == MODE_INT)
8266 ? negv_optab : neg_optab, op0, target, 0);
8267 if (temp == 0)
8268 abort ();
8269 return temp;
8270
8271 case ABS_EXPR:
8272 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8274 /* Handle complex values specially. */
8275 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
8276 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
8277 return expand_complex_abs (mode, op0, target, unsignedp);
8279 /* Unsigned abs is simply the operand. Testing here means we don't
8280 risk generating incorrect code below. */
8281 if (TREE_UNSIGNED (type))
8282 return op0;
8283
8284 return expand_abs (mode, op0, target, unsignedp,
8285 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8286
8287 case MIN_EXPR:
8288 case MAX_EXPR:
8289 target = original_target;
8290 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
8291 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
8292 || GET_MODE (target) != mode
8293 || (GET_CODE (target) == REG
8294 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8295 target = gen_reg_rtx (mode);
8296 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8297 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8299 /* First try to do it with a special MIN or MAX instruction.
8300 If that does not win, use a conditional jump to select the proper
8301 value. */
8302 this_optab = (TREE_UNSIGNED (type)
8303 ? (code == MIN_EXPR ? umin_optab : umax_optab)
8304 : (code == MIN_EXPR ? smin_optab : smax_optab));
8306 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8307 OPTAB_WIDEN);
8308 if (temp != 0)
8309 return temp;
8310
8311 /* At this point, a MEM target is no longer useful; we will get better
8312 code without one. */
8313
8314 if (GET_CODE (target) == MEM)
8315 target = gen_reg_rtx (mode);
8316
8317 if (target != op0)
8318 emit_move_insn (target, op0);
8320 op0 = gen_label_rtx ();
8322 /* If this mode is an integer too wide to compare properly,
8323 compare word by word. Rely on cse to optimize constant cases. */
8324 if (GET_MODE_CLASS (mode) == MODE_INT
8325 && ! can_compare_p (GE, mode, ccp_jump))
8326 {
8327 if (code == MAX_EXPR)
8328 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8329 target, op1, NULL_RTX, op0);
8330 else
8331 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8332 op1, target, NULL_RTX, op0);
8333 }
8334 else
8335 {
8336 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
8337 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
8338 unsignedp, mode, NULL_RTX, NULL_RTX,
8339 op0);
8340 }
8341 emit_move_insn (target, op1);
8342 emit_label (op0);
8343 return target;
8344
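/* Sketch of the fallback sequence for MAX_EXPR (a, b) when the target
   has no smax pattern:

	target = a;
	if (target >= b) goto lab;   ; GE for MAX, LE for MIN
	target = b;
     lab:

   with the word-by-word jump used instead when the mode is too wide
   for a single compare insn.  */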
8345 case BIT_NOT_EXPR:
8346 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8347 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8348 if (temp == 0)
8349 abort ();
8350 return temp;
8351
8352 case FFS_EXPR:
8353 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8354 temp = expand_unop (mode, ffs_optab, op0, target, 1);
8355 if (temp == 0)
8356 abort ();
8357 return temp;
8358
8359 /* ??? Can optimize bitwise operations with one arg constant.
8360 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8361 and (a bitwise1 b) bitwise2 b (etc)
8362 but that is probably not worth while. */
8364 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8365 boolean values when we want in all cases to compute both of them. In
8366 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8367 as actual zero-or-1 values and then bitwise anding. In cases where
8368 there cannot be any side effects, better code would be made by
8369 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8370 how to recognize those cases. */
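/* Concretely, (a < b) & (c < d) reduced to boolean operands becomes two
   store-flag results ANDed together with no branches, so both
   comparisons are always evaluated; TRUTH_ANDIF_EXPR on the same
   operands would instead branch around the evaluation of (c < d).  */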
8372 case TRUTH_AND_EXPR:
8373 case BIT_AND_EXPR:
8374 this_optab = and_optab;
8375 goto binop;
8376
8377 case TRUTH_OR_EXPR:
8378 case BIT_IOR_EXPR:
8379 this_optab = ior_optab;
8380 goto binop;
8381
8382 case TRUTH_XOR_EXPR:
8383 case BIT_XOR_EXPR:
8384 this_optab = xor_optab;
8385 goto binop;
8386
8387 case LSHIFT_EXPR:
8388 case RSHIFT_EXPR:
8389 case LROTATE_EXPR:
8390 case RROTATE_EXPR:
8391 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8392 subtarget = 0;
8393 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8394 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8395 unsignedp);
8396
8397 /* Could determine the answer when only additive constants differ. Also,
8398 the addition of one can be handled by changing the condition. */
8399 case LT_EXPR:
8400 case LE_EXPR:
8401 case GT_EXPR:
8402 case GE_EXPR:
8403 case EQ_EXPR:
8404 case NE_EXPR:
8405 case UNORDERED_EXPR:
8406 case ORDERED_EXPR:
8407 case UNLT_EXPR:
8408 case UNLE_EXPR:
8409 case UNGT_EXPR:
8410 case UNGE_EXPR:
8411 case UNEQ_EXPR:
8412 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
8413 if (temp != 0)
8414 return temp;
8415
8416 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8417 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8418 && original_target
8419 && GET_CODE (original_target) == REG
8420 && (GET_MODE (original_target)
8421 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8422 {
8423 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8424 VOIDmode, 0);
8425
8426 /* If temp is constant, we can just compute the result. */
8427 if (GET_CODE (temp) == CONST_INT)
8428 {
8429 if (INTVAL (temp) != 0)
8430 emit_move_insn (target, const1_rtx);
8431 else
8432 emit_move_insn (target, const0_rtx);
8433
8434 return target;
8435 }
8436
8437 if (temp != original_target)
8438 {
8439 enum machine_mode mode1 = GET_MODE (temp);
8440 if (mode1 == VOIDmode)
8441 mode1 = tmode != VOIDmode ? tmode : mode;
8442
8443 temp = copy_to_mode_reg (mode1, temp);
8444 }
8445
8446 op1 = gen_label_rtx ();
8447 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8448 GET_MODE (temp), unsignedp, op1);
8449 emit_move_insn (temp, const1_rtx);
8450 emit_label (op1);
8451 return temp;
8452 }
8453
8454 /* If no set-flag instruction, must generate a conditional
8455 store into a temporary variable. Drop through
8456 and handle this like && and ||. */
8458 case TRUTH_ANDIF_EXPR:
8459 case TRUTH_ORIF_EXPR:
8460 if (! ignore
8461 && (target == 0 || ! safe_from_p (target, exp, 1)
8462 /* Make sure we don't have a hard reg (such as function's return
8463 value) live across basic blocks, if not optimizing. */
8464 || (!optimize && GET_CODE (target) == REG
8465 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8466 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8467
8468 if (target)
8469 emit_clr_insn (target);
8470
8471 op1 = gen_label_rtx ();
8472 jumpifnot (exp, op1);
8473
8474 if (target)
8475 emit_0_to_1_insn (target);
8476
8477 emit_label (op1);
8478 return ignore ? const0_rtx : target;
8480 case TRUTH_NOT_EXPR:
8481 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8482 /* The parser is careful to generate TRUTH_NOT_EXPR
8483 only with operands that are always zero or one. */
8484 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8485 target, 1, OPTAB_LIB_WIDEN);
8486 if (temp == 0)
8487 abort ();
8488 return temp;
8489
8490 case COMPOUND_EXPR:
8491 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8492 emit_queue ();
8493 return expand_expr (TREE_OPERAND (exp, 1),
8494 (ignore ? const0_rtx : target),
8495 VOIDmode, modifier);
8496
8497 case COND_EXPR:
8498 /* If we would have a "singleton" (see below) were it not for a
8499 conversion in each arm, bring that conversion back out. */
8500 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8501 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8502 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8503 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8504 {
8505 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8506 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8508 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8509 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8510 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8511 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8512 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8513 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8514 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8515 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8516 return expand_expr (build1 (NOP_EXPR, type,
8517 build (COND_EXPR, TREE_TYPE (iftrue),
8518 TREE_OPERAND (exp, 0),
8519 iftrue, iffalse)),
8520 target, tmode, modifier);
8521 }
8522
8524 /* Note that COND_EXPRs whose type is a structure or union
8525 are required to be constructed to contain assignments of
8526 a temporary variable, so that we can evaluate them here
8527 for side effect only. If type is void, we must do likewise. */
8529 /* If an arm of the branch requires a cleanup,
8530 only that cleanup is performed. */
8531 {
8532 tree singleton = 0;
8533 tree binary_op = 0, unary_op = 0;
8535 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8536 convert it to our mode, if necessary. */
8537 if (integer_onep (TREE_OPERAND (exp, 1))
8538 && integer_zerop (TREE_OPERAND (exp, 2))
8539 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8540 {
8541 if (ignore)
8542 {
8543 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8544 modifier);
8545 return const0_rtx;
8546 }
8547
8548 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8549 if (GET_MODE (op0) == mode)
8550 return op0;
8551
8552 if (target == 0)
8553 target = gen_reg_rtx (mode);
8554 convert_move (target, op0, unsignedp);
8555 return target;
8556 }
8557
8558 /* Check for X ? A + B : A. If we have this, we can copy A to the
8559 output and conditionally add B. Similarly for unary operations.
8560 Don't do this if X has side-effects because those side effects
8561 might affect A or B and the "?" operation is a sequence point in
8562 ANSI. (operand_equal_p tests for side effects.) */
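/* So for x ? a + 3 : a, SINGLETON is `a' and BINARY_OP is `a + 3':
   `a' can be stored into the output unconditionally and 3 added only
   on the arm where x is true.  x ? -a : a likewise records `-a' as
   UNARY_OP.  Since operand_equal_p rejects trees with side effects,
   f () ? f () + 3 : f () is never treated this way.  */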
8564 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8565 && operand_equal_p (TREE_OPERAND (exp, 2),
8566 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8567 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8568 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8569 && operand_equal_p (TREE_OPERAND (exp, 1),
8570 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8571 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8572 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8573 && operand_equal_p (TREE_OPERAND (exp, 2),
8574 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8575 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8576 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8577 && operand_equal_p (TREE_OPERAND (exp, 1),
8578 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8579 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8581 /* If we are not to produce a result, we have no target. Otherwise,
8582 if a target was specified use it; it will not be used as an
8583 intermediate target unless it is safe. If no target, use a
8584 temporary and convert later. */
8585
8586 if (ignore)
8587 temp = 0;
8588 else if (original_target
8589 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8590 || (singleton && GET_CODE (original_target) == REG
8591 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8592 && original_target == var_rtx (singleton)))
8593 && GET_MODE (original_target) == mode
8594 #ifdef HAVE_conditional_move
8595 && (! can_conditionally_move_p (mode)
8596 || GET_CODE (original_target) == REG
8597 || TREE_ADDRESSABLE (type))
8598 #endif
8599 && (GET_CODE (original_target) != MEM
8600 || TREE_ADDRESSABLE (type)))
8601 temp = original_target;
8602 else if (TREE_ADDRESSABLE (type))
8603 abort ();
8604 else
8605 temp = assign_temp (type, 0, 0, 1);
8606
8607 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8608 do the test of X as a store-flag operation, do this as
8609 A + ((X != 0) << log C). Similarly for other simple binary
8610 operators. Only do for C == 1 if BRANCH_COST is low. */
8611 if (temp && singleton && binary_op
8612 && (TREE_CODE (binary_op) == PLUS_EXPR
8613 || TREE_CODE (binary_op) == MINUS_EXPR
8614 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8615 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8616 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8617 : integer_onep (TREE_OPERAND (binary_op, 1)))
8618 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8619 {
8620 rtx result;
8621 tree cond;
8622 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8623 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8624 ? addv_optab : add_optab)
8625 : TREE_CODE (binary_op) == MINUS_EXPR
8626 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8627 ? subv_optab : sub_optab)
8628 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8629 : xor_optab);
8630
8631 /* If we had X ? A : A + 1, do this as A + (X == 0). */
8632 if (singleton == TREE_OPERAND (exp, 1))
8633 cond = invert_truthvalue (TREE_OPERAND (exp, 0));
8634 else
8635 cond = TREE_OPERAND (exp, 0);
8636
8637 result = do_store_flag (cond, (safe_from_p (temp, singleton, 1)
8638 ? temp : NULL_RTX),
8639 mode, BRANCH_COST <= 1);
8640
8641 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8642 result = expand_shift (LSHIFT_EXPR, mode, result,
8643 build_int_2 (tree_log2
8644 (TREE_OPERAND
8645 (binary_op, 1)),
8646 0),
8647 (safe_from_p (temp, singleton, 1)
8648 ? temp : NULL_RTX), 0);
8649
8650 if (result)
8651 {
8652 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8653 return expand_binop (mode, boptab, op1, result, temp,
8654 unsignedp, OPTAB_LIB_WIDEN);
8655 }
8656 }
8657
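/* A sketch of the branch-free result on a store-flag target, for
   x ? a + 4 : a:

	result = (x != 0);          ; do_store_flag
	result = result << 2;       ; log2 of the constant 4
	temp = a + result;

   For x ? a : a + 4 the condition is inverted first, so the constant
   is still added exactly when the addition arm is selected.  */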
8658 do_pending_stack_adjust ();
8659 NO_DEFER_POP;
8660 op0 = gen_label_rtx ();
8661
8662 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8663 {
8664 if (temp != 0)
8665 {
8666 /* If the target conflicts with the other operand of the
8667 binary op, we can't use it. Also, we can't use the target
8668 if it is a hard register, because evaluating the condition
8669 might clobber it. */
8670 if ((binary_op
8671 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8672 || (GET_CODE (temp) == REG
8673 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8674 temp = gen_reg_rtx (mode);
8675 store_expr (singleton, temp, 0);
8676 }
8677 else
8678 expand_expr (singleton,
8679 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8680 if (singleton == TREE_OPERAND (exp, 1))
8681 jumpif (TREE_OPERAND (exp, 0), op0);
8682 else
8683 jumpifnot (TREE_OPERAND (exp, 0), op0);
8685 start_cleanup_deferral ();
8686 if (binary_op && temp == 0)
8687 /* Just touch the other operand. */
8688 expand_expr (TREE_OPERAND (binary_op, 1),
8689 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8690 else if (binary_op)
8691 store_expr (build (TREE_CODE (binary_op), type,
8692 make_tree (type, temp),
8693 TREE_OPERAND (binary_op, 1)),
8694 temp, 0);
8695 else
8696 store_expr (build1 (TREE_CODE (unary_op), type,
8697 make_tree (type, temp)),
8698 temp, 0);
8699 op1 = op0;
8700 }
8701 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8702 comparison operator. If we have one of these cases, set the
8703 output to A, branch on A (cse will merge these two references),
8704 then set the output to FOO. */
8705 else if (temp
8706 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8707 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8708 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8709 TREE_OPERAND (exp, 1), 0)
8710 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8711 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8712 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8713 {
8714 if (GET_CODE (temp) == REG
8715 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8716 temp = gen_reg_rtx (mode);
8717 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8718 jumpif (TREE_OPERAND (exp, 0), op0);
8720 start_cleanup_deferral ();
8721 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8722 op1 = op0;
8723 }
8724 else if (temp
8725 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8726 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8727 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8728 TREE_OPERAND (exp, 2), 0)
8729 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8730 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8731 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8732 {
8733 if (GET_CODE (temp) == REG
8734 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8735 temp = gen_reg_rtx (mode);
8736 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8737 jumpifnot (TREE_OPERAND (exp, 0), op0);
8739 start_cleanup_deferral ();
8740 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8741 op1 = op0;
8742 }
8743 else
8744 {
8745 op1 = gen_label_rtx ();
8746 jumpifnot (TREE_OPERAND (exp, 0), op0);
8748 start_cleanup_deferral ();
8750 /* One branch of the cond can be void, if it never returns. For
8751 example A ? throw : E */
8752 if (temp != 0
8753 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8754 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8755 else
8756 expand_expr (TREE_OPERAND (exp, 1),
8757 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8758 end_cleanup_deferral ();
8759 emit_queue ();
8760 emit_jump_insn (gen_jump (op1));
8761 emit_barrier ();
8762 emit_label (op0);
8763 start_cleanup_deferral ();
8764 if (temp != 0
8765 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8766 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8767 else
8768 expand_expr (TREE_OPERAND (exp, 2),
8769 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8772 end_cleanup_deferral ();
8773 }
8774
8775 emit_queue ();
8776 emit_label (op1);
8777 OK_DEFER_POP;
8778 return temp;
8779 }
8780
8781 case TARGET_EXPR:
8782 {
8783 /* Something needs to be initialized, but we didn't know
8784 where that thing was when building the tree. For example,
8785 it could be the return value of a function, or a parameter
8786 to a function which lays down in the stack, or a temporary
8787 variable which must be passed by reference.
8789 We guarantee that the expression will either be constructed
8790 or copied into our original target. */
8792 tree slot = TREE_OPERAND (exp, 0);
8793 tree cleanups = NULL_TREE;
8796 if (TREE_CODE (slot) != VAR_DECL)
8797 abort ();
8798
8799 if (! ignore)
8800 target = original_target;
8801
8802 /* Set this here so that if we get a target that refers to a
8803 register variable that's already been used, put_reg_into_stack
8804 knows that it should fix up those uses. */
8805 TREE_USED (slot) = 1;
8806
8807 if (target == 0)
8808 {
8809 if (DECL_RTL_SET_P (slot))
8810 {
8811 target = DECL_RTL (slot);
8812 /* We have already expanded the slot, so don't do
8813 anything else now. */
8814 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8815 return target;
8816 }
8817 else
8818 {
8819 target = assign_temp (type, 2, 0, 1);
8820 /* All temp slots at this level must not conflict. */
8821 preserve_temp_slots (target);
8822 SET_DECL_RTL (slot, target);
8823 if (TREE_ADDRESSABLE (slot))
8824 put_var_into_stack (slot);
8826 /* Since SLOT is not known to the called function
8827 to belong to its stack frame, we must build an explicit
8828 cleanup. This case occurs when we must build up a reference
8829 to pass the reference as an argument. In this case,
8830 it is very likely that such a reference need not be
8831 built here. */
8832
8833 if (TREE_OPERAND (exp, 2) == 0)
8834 TREE_OPERAND (exp, 2)
8835 = (*lang_hooks.maybe_build_cleanup) (slot);
8836 cleanups = TREE_OPERAND (exp, 2);
8837 }
8838 }
8839 else
8840 {
8841 /* This case does occur, when expanding a parameter which
8842 needs to be constructed on the stack. The target
8843 is the actual stack address that we want to initialize.
8844 The function we call will perform the cleanup in this case. */
8846 /* If we have already assigned it space, use that space,
8847 not target that we were passed in, as our target
8848 parameter is only a hint. */
8849 if (DECL_RTL_SET_P (slot))
8850 {
8851 target = DECL_RTL (slot);
8852 /* We have already expanded the slot, so don't do
8853 anything else now. */
8854 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8855 return target;
8856 }
8857 else
8858 {
8859 SET_DECL_RTL (slot, target);
8860 /* If we must have an addressable slot, then make sure that
8861 the RTL that we just stored in slot is OK. */
8862 if (TREE_ADDRESSABLE (slot))
8863 put_var_into_stack (slot);
8864 }
8865 }
8866
8867 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8868 /* Mark it as expanded. */
8869 TREE_OPERAND (exp, 1) = NULL_TREE;
8871 store_expr (exp1, target, 0);
8873 expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));
8874
8875 return target;
8876 }
8877
8878 case INIT_EXPR:
8879 {
8880 tree lhs = TREE_OPERAND (exp, 0);
8881 tree rhs = TREE_OPERAND (exp, 1);
8883 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8884 return temp;
8885 }
8886
8887 case MODIFY_EXPR:
8888 {
8889 /* If lhs is complex, expand calls in rhs before computing it.
8890 That's so we don't compute a pointer and save it over a
8891 call. If lhs is simple, compute it first so we can give it
8892 as a target if the rhs is just a call. This avoids an
8893 extra temp and copy and that prevents a partial-subsumption
8894 which makes bad code. Actually we could treat
8895 component_ref's of vars like vars. */
8897 tree lhs = TREE_OPERAND (exp, 0);
8898 tree rhs = TREE_OPERAND (exp, 1);
8902 /* Check for |= or &= of a bitfield of size one into another bitfield
8903 of size 1. In this case, (unless we need the result of the
8904 assignment) we can do this more efficiently with a
8905 test followed by an assignment, if necessary.
8907 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8908 things change so we do, this code should be enhanced to
8909 handle it. */
8910 if (ignore
8911 && TREE_CODE (lhs) == COMPONENT_REF
8912 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8913 || TREE_CODE (rhs) == BIT_AND_EXPR)
8914 && TREE_OPERAND (rhs, 0) == lhs
8915 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8916 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8917 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8918 {
8919 rtx label = gen_label_rtx ();
8920
8921 do_jump (TREE_OPERAND (rhs, 1),
8922 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8923 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8924 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8925 (TREE_CODE (rhs) == BIT_IOR_EXPR
8926 ? integer_one_node
8927 : integer_zero_node)),
8928 0, 0);
8929 do_pending_stack_adjust ();
8930 emit_label (label);
8931 return const0_rtx;
8932 }
8933
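/* E.g. for `s.a |= s.b;' with two one-bit bitfields and the result
   unused: branch on s.b, and only when the bit is set store constant 1
   into s.a, instead of extracting both fields, OR-ing, and inserting
   the result back.  For &= the roles of the jump labels are swapped.  */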
8934 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8935 return temp;
8936 }
8937
8938 case RETURN_EXPR:
8940 if (!TREE_OPERAND (exp, 0))
8941 expand_null_return ();
8942 else
8943 expand_return (TREE_OPERAND (exp, 0));
8944 return const0_rtx;
8945
8946 case PREINCREMENT_EXPR:
8947 case PREDECREMENT_EXPR:
8948 return expand_increment (exp, 0, ignore);
8950 case POSTINCREMENT_EXPR:
8951 case POSTDECREMENT_EXPR:
8952 /* Faster to treat as pre-increment if result is not used. */
8953 return expand_increment (exp, ! ignore, ignore);
8954
8955 case ADDR_EXPR:
8956 /* Are we taking the address of a nested function? */
8957 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8958 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8959 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8960 && ! TREE_STATIC (exp))
8961 {
8962 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8963 op0 = force_operand (op0, target);
8964 }
8965 /* If we are taking the address of something erroneous, just
8966 use zero. */
8967 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8968 op0 = const0_rtx;
8969 /* If we are taking the address of a constant and are at the
8970 top level, we have to use output_constant_def since we can't
8971 call force_const_mem at top level. */
8972 else if (cfun == 0
8973 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8974 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
8975 == 'c')))
8976 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
8977 else
8978 {
8979 /* We make sure to pass const0_rtx down if we came in with
8980 ignore set, to avoid doing the cleanups twice for something. */
8981 op0 = expand_expr (TREE_OPERAND (exp, 0),
8982 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8983 (modifier == EXPAND_INITIALIZER
8984 ? modifier : EXPAND_CONST_ADDRESS));
8986 /* If we are going to ignore the result, OP0 will have been set
8987 to const0_rtx, so just return it. Don't get confused and
8988 think we are taking the address of the constant. */
8989 if (ignore)
8990 return op0;
8991
8992 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8993 clever and returns a REG when given a MEM. */
8994 op0 = protect_from_queue (op0, 1);
8996 /* We would like the object in memory. If it is a constant, we can
8997 have it be statically allocated into memory. For a non-constant,
8998 we need to allocate some memory and store the value into it. */
9000 if (CONSTANT_P (op0))
9001 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
9002 op0);
9003 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
9004 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
9005 || GET_CODE (op0) == PARALLEL)
9006 {
9007 /* If the operand is a SAVE_EXPR, we can deal with this by
9008 forcing the SAVE_EXPR into memory. */
9009 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
9010 {
9011 put_var_into_stack (TREE_OPERAND (exp, 0));
9012 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
9013 }
9014 else
9015 {
9016 /* If this object is in a register, it can't be BLKmode. */
9017 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9018 rtx memloc = assign_temp (inner_type, 1, 1, 1);
9020 if (GET_CODE (op0) == PARALLEL)
9021 /* Handle calls that pass values in multiple
9022 non-contiguous locations. The Irix 6 ABI has examples
9023 of this. */
9024 emit_group_store (memloc, op0,
9025 int_size_in_bytes (inner_type));
9026 else
9027 emit_move_insn (memloc, op0);
9028
9029 op0 = memloc;
9030 }
9031 }
9032
9033 if (GET_CODE (op0) != MEM)
9034 abort ();
9035
9036 mark_temp_addr_taken (op0);
9037 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
9038 {
9039 op0 = XEXP (op0, 0);
9040 #ifdef POINTERS_EXTEND_UNSIGNED
9041 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
9042 && mode == ptr_mode)
9043 op0 = convert_memory_address (ptr_mode, op0);
9044 #endif
9045
9046 return op0;
9047 }
9048 /* If OP0 is not aligned as least as much as the type requires, we
9049 need to make a temporary, copy OP0 to it, and take the address of
9050 the temporary. We want to use the alignment of the type, not of
9051 the operand. Note that this is incorrect for FUNCTION_TYPE, but
9052 the test for BLKmode means that can't happen. The test for
9053 BLKmode is because we never make mis-aligned MEMs with
9054 non-BLKmode undefined modes.
9055
9056 We don't need to do this at all if the machine doesn't have
9057 strict alignment. */
9058 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
9059 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
9060 > MEM_ALIGN (op0))
9061 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
9062 {
9063 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9064 rtx new
9065 = assign_stack_temp_for_type
9066 (TYPE_MODE (inner_type),
9067 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
9068 : int_size_in_bytes (inner_type),
9069 1, build_qualified_type (inner_type,
9070 (TYPE_QUALS (inner_type)
9071 | TYPE_QUAL_CONST)));
9073 if (TYPE_ALIGN_OK (inner_type))
9074 abort ();
9075
9076 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
9077 BLOCK_OP_NORMAL);
9078 op0 = new;
9079 }
9080
9081 op0 = force_operand (XEXP (op0, 0), target);
9084 if (flag_force_addr
9085 && GET_CODE (op0) != REG
9086 && modifier != EXPAND_CONST_ADDRESS
9087 && modifier != EXPAND_INITIALIZER
9088 && modifier != EXPAND_SUM)
9089 op0 = force_reg (Pmode, op0);
9091 if (GET_CODE (op0) == REG
9092 && ! REG_USERVAR_P (op0))
9093 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
9095 #ifdef POINTERS_EXTEND_UNSIGNED
9096 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
9097 && mode == ptr_mode)
9098 op0 = convert_memory_address (ptr_mode, op0);
9099 #endif
9100
9101 return op0;
9102
9103 case ENTRY_VALUE_EXPR:
9104 abort ();
9105
9106 /* COMPLEX type for Extended Pascal & Fortran */
9107 case COMPLEX_EXPR:
9108 {
9109 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9110 rtx insns;
9111
9112 /* Get the rtx code of the operands. */
9113 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9114 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
9115
9116 if (! target)
9117 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
9118
9119 start_sequence ();
9120
9121 /* Move the real (op0) and imaginary (op1) parts to their location. */
9122 emit_move_insn (gen_realpart (mode, target), op0);
9123 emit_move_insn (gen_imagpart (mode, target), op1);
9125 insns = get_insns ();
9126 end_sequence ();
9127
9128 /* Complex construction should appear as a single unit. */
9129 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
9130 each with a separate pseudo as destination.
9131 It's not correct for flow to treat them as a unit. */
9132 if (GET_CODE (target) != CONCAT)
9133 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
9134 else
9135 emit_insn (insns);
9136
9137 return target;
9138 }
9139
9140 case REALPART_EXPR:
9141 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9142 return gen_realpart (mode, op0);
9143
9144 case IMAGPART_EXPR:
9145 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9146 return gen_imagpart (mode, op0);
9147
9148 case CONJ_EXPR:
9149 {
9150 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9151 rtx imag_t;
9152 rtx insns;
9153
9154 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9155
9156 if (! target)
9157 target = gen_reg_rtx (mode);
9158
9159 start_sequence ();
9160
9161 /* Store the realpart and the negated imagpart to target. */
9162 emit_move_insn (gen_realpart (partmode, target),
9163 gen_realpart (partmode, op0));
9165 imag_t = gen_imagpart (partmode, target);
9166 temp = expand_unop (partmode,
9167 ! unsignedp && flag_trapv
9168 && (GET_MODE_CLASS(partmode) == MODE_INT)
9169 ? negv_optab : neg_optab,
9170 gen_imagpart (partmode, op0), imag_t, 0);
9171 if (temp != imag_t)
9172 emit_move_insn (imag_t, temp);
9173
9174 insns = get_insns ();
9175 end_sequence ();
9176
9177 /* Conjugate should appear as a single unit
9178 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
9179 each with a separate pseudo as destination.
9180 It's not correct for flow to treat them as a unit. */
9181 if (GET_CODE (target) != CONCAT)
9182 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
9183 else
9184 emit_insn (insns);
9185
9186 return target;
9187 }
9188
9189 case TRY_CATCH_EXPR:
9190 {
9191 tree handler = TREE_OPERAND (exp, 1);
9192
9193 expand_eh_region_start ();
9194
9195 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9196
9197 expand_eh_region_end_cleanup (handler);
9198
9199 return op0;
9200 }
9201
9202 case TRY_FINALLY_EXPR:
9204 tree try_block = TREE_OPERAND (exp, 0);
9205 tree finally_block = TREE_OPERAND (exp, 1);
9207 if (!optimize || unsafe_for_reeval (finally_block) > 1)
9208 {
9209 /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
9210 is not sufficient, so we cannot expand the block twice.
9211 So we play games with GOTO_SUBROUTINE_EXPR to let us
9212 expand the thing only once. */
9213 /* When not optimizing, we go ahead with this form since
9214 (1) user breakpoints operate more predictably without
9215 code duplication, and
9216 (2) we're not running any of the global optimizers
9217 that would explode in time/space with the highly
9218 connected CFG created by the indirect branching. */
9220 rtx finally_label = gen_label_rtx ();
9221 rtx done_label = gen_label_rtx ();
9222 rtx return_link = gen_reg_rtx (Pmode);
9223 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
9224 (tree) finally_label, (tree) return_link);
9225 TREE_SIDE_EFFECTS (cleanup) = 1;
9227 /* Start a new binding layer that will keep track of all cleanup
9228 actions to be performed. */
9229 expand_start_bindings (2);
9230 target_temp_slot_level = temp_slot_level;
9232 expand_decl_cleanup (NULL_TREE, cleanup);
9233 op0 = expand_expr (try_block, target, tmode, modifier);
9235 preserve_temp_slots (op0);
9236 expand_end_bindings (NULL_TREE, 0, 0);
9237 emit_jump (done_label);
9238 emit_label (finally_label);
9239 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
9240 emit_indirect_jump (return_link);
9241 emit_label (done_label);
9242 }
9243 else
9244 {
9245 expand_start_bindings (2);
9246 target_temp_slot_level = temp_slot_level;
9248 expand_decl_cleanup (NULL_TREE, finally_block);
9249 op0 = expand_expr (try_block, target, tmode, modifier);
9251 preserve_temp_slots (op0);
9252 expand_end_bindings (NULL_TREE, 0, 0);
9253 }
9254
9255 return op0;
9256 }
9257
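/* Rough shape of the code emitted for the non-reevaluable arm of
   try { BODY } finally { CLEANUP }:

	 BODY                       ; each exit calls the subroutine
	 goto_subroutine FINALLY
	 goto DONE
     FINALLY:
	 CLEANUP
	 jump *return_link
     DONE:

   so CLEANUP is emitted exactly once; the other arm simply registers
   FINALLY_BLOCK as an ordinary cleanup and lets it be expanded at
   each exit.  */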
9258 case GOTO_SUBROUTINE_EXPR:
9259 {
9260 rtx subr = (rtx) TREE_OPERAND (exp, 0);
9261 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
9262 rtx return_address = gen_label_rtx ();
9263 emit_move_insn (return_link,
9264 gen_rtx_LABEL_REF (Pmode, return_address));
9265 emit_jump (subr);
9266 emit_label (return_address);
9267 return const0_rtx;
9268 }
9269
9270 case VA_ARG_EXPR:
9271 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
9272
9273 case EXC_PTR_EXPR:
9274 return get_exception_pointer (cfun);
9275
9276 case FDESC_EXPR:
9277 /* Function descriptors are not valid except for as
9278 initialization constants, and should not be expanded. */
9279 abort ();
9280
9281 default:
9282 return (*lang_hooks.expand_expr) (exp, original_target, tmode, modifier);
9283 }
9284
9285 /* Here to do an ordinary binary operator, generating an instruction
9286 from the optab already placed in `this_optab'. */
9287 binop:
9288 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
9289 subtarget = 0;
9290 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
9291 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9292 binop2:
9293 temp = expand_binop (mode, this_optab, op0, op1, target,
9294 unsignedp, OPTAB_LIB_WIDEN);
9295 if (temp == 0)
9296 abort ();
9297 return temp;
9298 }
9299
9300 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9301 when applied to the address of EXP produces an address known to be
9302 aligned more than BIGGEST_ALIGNMENT. */
9304 static int
9305 is_aligning_offset (offset, exp)
9306 tree offset;
9307 tree exp;
9308 {
9309 /* Strip off any conversions and WITH_RECORD_EXPR nodes. */
9310 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9311 || TREE_CODE (offset) == NOP_EXPR
9312 || TREE_CODE (offset) == CONVERT_EXPR
9313 || TREE_CODE (offset) == WITH_RECORD_EXPR)
9314 offset = TREE_OPERAND (offset, 0);
9316 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9317 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9318 if (TREE_CODE (offset) != BIT_AND_EXPR
9319 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9320 || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT) <= 0
9321 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9322 return 0;
9323
9324 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9325 It must be NEGATE_EXPR. Then strip any more conversions. */
9326 offset = TREE_OPERAND (offset, 0);
9327 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9328 || TREE_CODE (offset) == NOP_EXPR
9329 || TREE_CODE (offset) == CONVERT_EXPR)
9330 offset = TREE_OPERAND (offset, 0);
9332 if (TREE_CODE (offset) != NEGATE_EXPR)
9333 return 0;
9334
9335 offset = TREE_OPERAND (offset, 0);
9336 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9337 || TREE_CODE (offset) == NOP_EXPR
9338 || TREE_CODE (offset) == CONVERT_EXPR)
9339 offset = TREE_OPERAND (offset, 0);
9341 /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
9342 whose type is the same as EXP. */
9343 return (TREE_CODE (offset) == ADDR_EXPR
9344 && (TREE_OPERAND (offset, 0) == exp
9345 || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
9346 && (TREE_TYPE (TREE_OPERAND (offset, 0))
9347 == TREE_TYPE (exp)))));
9348 }
9349
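/* The shape recognized above is the usual over-alignment idiom, e.g.

     addr = &exp + ((- (long) &exp) & (ALIGN - 1));

   where ALIGN - 1 is the BIT_AND_EXPR constant: whatever address EXP
   lands at, the adjusted address is a multiple of ALIGN, so when ALIGN
   exceeds BIGGEST_ALIGNMENT the result is known to be more aligned
   than anything the compiler places on its own.  */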
9350 /* Return the tree node if an ARG corresponds to a string constant or zero
9351 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9352 in bytes within the string that ARG is accessing. The type of the
9353 offset will be `sizetype'. */
9355 static tree
9356 string_constant (arg, ptr_offset)
9357 tree arg;
9358 tree *ptr_offset;
9359 {
9360 STRIP_NOPS (arg);
9361
9362 if (TREE_CODE (arg) == ADDR_EXPR
9363 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9365 *ptr_offset = size_zero_node;
9366 return TREE_OPERAND (arg, 0);
9367 }
9368 else if (TREE_CODE (arg) == PLUS_EXPR)
9369 {
9370 tree arg0 = TREE_OPERAND (arg, 0);
9371 tree arg1 = TREE_OPERAND (arg, 1);
9372
9373 STRIP_NOPS (arg0);
9374 STRIP_NOPS (arg1);
9375
9376 if (TREE_CODE (arg0) == ADDR_EXPR
9377 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9378 {
9379 *ptr_offset = convert (sizetype, arg1);
9380 return TREE_OPERAND (arg0, 0);
9381 }
9382 else if (TREE_CODE (arg1) == ADDR_EXPR
9383 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9384 {
9385 *ptr_offset = convert (sizetype, arg0);
9386 return TREE_OPERAND (arg1, 0);
9387 }
9388 }
9389
9390 return 0;
9391 }
9392
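/* Usage sketch: for the argument tree of strlen ("hello" + 2),
   string_constant returns the STRING_CST "hello" and sets *PTR_OFFSET
   to 2, letting callers such as the builtin strlen folder read the
   constant bytes directly.  */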
9393 /* Expand code for a post- or pre- increment or decrement
9394 and return the RTX for the result.
9395 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9397 static rtx
9398 expand_increment (exp, post, ignore)
9399 tree exp;
9400 int post, ignore;
9401 {
9402 rtx op0, op1;
9403 rtx temp, value;
9404 tree incremented = TREE_OPERAND (exp, 0);
9405 optab this_optab = add_optab;
9406 int icode;
9407 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9408 int op0_is_copy = 0;
9409 int single_insn = 0;
9410 /* 1 means we can't store into OP0 directly,
9411 because it is a subreg narrower than a word,
9412 and we don't dare clobber the rest of the word. */
9413 int bad_subreg = 0;
9414
9415 /* Stabilize any component ref that might need to be
9416 evaluated more than once below. */
9417 if (!post
9418 || TREE_CODE (incremented) == BIT_FIELD_REF
9419 || (TREE_CODE (incremented) == COMPONENT_REF
9420 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9421 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9422 incremented = stabilize_reference (incremented);
9423 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9424 ones into save exprs so that they don't accidentally get evaluated
9425 more than once by the code below. */
9426 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9427 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9428 incremented = save_expr (incremented);
9430 /* Compute the operands as RTX.
9431 Note whether OP0 is the actual lvalue or a copy of it:
9432 I believe it is a copy iff it is a register or subreg
9433 and insns were generated in computing it. */
9435 temp = get_last_insn ();
9436 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9438 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9439 in place but instead must do sign- or zero-extension during assignment,
9440 so we copy it into a new register and let the code below use it as
9443 Note that we can safely modify this SUBREG since it is know not to be
9444 shared (it was made by the expand_expr call above). */
9446 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9447 {
9448 if (post)
9449 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9450 else
9451 bad_subreg = 1;
9452 }
9453 else if (GET_CODE (op0) == SUBREG
9454 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9455 {
9456 /* We cannot increment this SUBREG in place. If we are
9457 post-incrementing, get a copy of the old value. Otherwise,
9458 just mark that we cannot increment in place. */
9459 if (post)
9460 op0 = copy_to_reg (op0);
9461 else
9462 bad_subreg = 1;
9463 }
9464
9465 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9466 && temp != get_last_insn ());
9467 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9469 /* Decide whether incrementing or decrementing. */
9470 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9471 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9472 this_optab = sub_optab;
9474 /* Convert decrement by a constant into a negative increment. */
9475 if (this_optab == sub_optab
9476 && GET_CODE (op1) == CONST_INT)
9477 {
9478 op1 = GEN_INT (-INTVAL (op1));
9479 this_optab = add_optab;
9480 }
9481
9482 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9483 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9485 /* For a preincrement, see if we can do this with a single instruction. */
9486 if (!post)
9487 {
9488 icode = (int) this_optab->handlers[(int) mode].insn_code;
9489 if (icode != (int) CODE_FOR_nothing
9490 /* Make sure that OP0 is valid for operands 0 and 1
9491 of the insn we want to queue. */
9492 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9493 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9494 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9495 single_insn = 1;
9496 }
9497
9498 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9499 then we cannot just increment OP0. We must therefore contrive to
9500 increment the original value. Then, for postincrement, we can return
9501 OP0 since it is a copy of the old value. For preincrement, expand here
9502 unless we can do it with a single insn.
9504 Likewise if storing directly into OP0 would clobber high bits
9505 we need to preserve (bad_subreg). */
9506 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9507 {
9508 /* This is the easiest way to increment the value wherever it is.
9509 Problems with multiple evaluation of INCREMENTED are prevented
9510 because either (1) it is a component_ref or preincrement,
9511 in which case it was stabilized above, or (2) it is an array_ref
9512 with constant index in an array in a register, which is
9513 safe to reevaluate. */
9514 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9515 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9516 ? MINUS_EXPR : PLUS_EXPR),
9517 TREE_TYPE (exp),
9518 incremented,
9519 TREE_OPERAND (exp, 1));
9520
9521 while (TREE_CODE (incremented) == NOP_EXPR
9522 || TREE_CODE (incremented) == CONVERT_EXPR)
9523 {
9524 newexp = convert (TREE_TYPE (incremented), newexp);
9525 incremented = TREE_OPERAND (incremented, 0);
9526 }
9527
9528 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
9529 return post ? op0 : temp;
9530 }
9531
9534 /* We have a true reference to the value in OP0.
9535 If there is an insn to add or subtract in this mode, queue it.
9536 Queueing the increment insn avoids the register shuffling
9537 that often results if we must increment now and first save
9538 the old value for subsequent use. */
9540 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9541 op0 = stabilize (op0);
9542 #endif
9543
9544 icode = (int) this_optab->handlers[(int) mode].insn_code;
9545 if (icode != (int) CODE_FOR_nothing
9546 /* Make sure that OP0 is valid for operands 0 and 1
9547 of the insn we want to queue. */
9548 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9549 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9550 {
9551 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9552 op1 = force_reg (mode, op1);
9554 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9555 }
9556 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9557 {
9558 rtx addr = (general_operand (XEXP (op0, 0), mode)
9559 ? force_reg (Pmode, XEXP (op0, 0))
9560 : copy_to_reg (XEXP (op0, 0)));
9561 rtx temp, result;
9562
9563 op0 = replace_equiv_address (op0, addr);
9564 temp = force_reg (GET_MODE (op0), op0);
9565 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9566 op1 = force_reg (mode, op1);
9568 /* The increment queue is LIFO, thus we have to `queue'
9569 the instructions in reverse order. */
9570 enqueue_insn (op0, gen_move_insn (op0, temp));
9571 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9572 return result;
9573 }
9574
9576 /* Preincrement, or we can't increment with one simple insn. */
9577 if (post)
9578 /* Save a copy of the value before inc or dec, to return it later. */
9579 temp = value = copy_to_reg (op0);
9580 else
9581 /* Arrange to return the incremented value. */
9582 /* Copy the rtx because expand_binop will protect from the queue,
9583 and the results of that would be invalid for us to return
9584 if our caller does emit_queue before using our result. */
9585 temp = copy_rtx (value = op0);
9587 /* Increment however we can. */
9588 op1 = expand_binop (mode, this_optab, value, op1, op0,
9589 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9591 /* Make sure the value is stored into OP0. */
9592 if (op1 != op0)
9593 emit_move_insn (op0, op1);
9594
9595 return temp;
9596 }
9597
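/* So for `b = a++;' when a one-insn add exists: the old value of `a'
   is returned (or copied) immediately and the add is enqueued;
   emit_queue later places `a = a + 1' after all uses of the old
   value, avoiding the explicit temporary of the naive
   t = a; a = a + 1; b = t sequence.  */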
9598 /* At the start of a function, record that we have no previously-pushed
9599 arguments waiting to be popped. */
9601 void
9602 init_pending_stack_adjust ()
9603 {
9604 pending_stack_adjust = 0;
9605 }
9606
9607 /* When exiting from function, if safe, clear out any pending stack adjust
9608 so the adjustment won't get done.
9610 Note, if the current function calls alloca, then it must have a
9611 frame pointer regardless of the value of flag_omit_frame_pointer. */
9613 void
9614 clear_pending_stack_adjust ()
9615 {
9616 #ifdef EXIT_IGNORE_STACK
9617 if (optimize > 0
9618 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9619 && EXIT_IGNORE_STACK
9620 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9621 && ! flag_inline_functions)
9622 {
9623 stack_pointer_delta -= pending_stack_adjust,
9624 pending_stack_adjust = 0;
9625 }
9626 #endif
9627 }
9628
9629 /* Pop any previously-pushed arguments that have not been popped yet. */
9631 void
9632 do_pending_stack_adjust ()
9633 {
9634 if (inhibit_defer_pop == 0)
9635 {
9636 if (pending_stack_adjust != 0)
9637 adjust_stack (GEN_INT (pending_stack_adjust));
9638 pending_stack_adjust = 0;
9639 }
9640 }
9641
9642 /* Expand conditional expressions. */
9644 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9645 LABEL is an rtx of code CODE_LABEL, in this function
9646 and all the functions here. */
9647
9648 void
9649 jumpifnot (exp, label)
9650 tree exp;
9651 rtx label;
9652 {
9653 do_jump (exp, label, NULL_RTX);
9654 }
9655
9656 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9657
9658 void
9659 jumpif (exp, label)
9660 tree exp;
9661 rtx label;
9662 {
9663 do_jump (exp, NULL_RTX, label);
9664 }
9665
9666 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9667 the result is zero, or IF_TRUE_LABEL if the result is one.
9668 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9669 meaning fall through in that case.
9671 do_jump always does any pending stack adjust except when it does not
9672 actually perform a jump. An example where there is no jump
9673 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9675 This function is responsible for optimizing cases such as
9676 &&, || and comparison operators in EXP. */
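/* For example, do_jump on `a && b' with only IF_FALSE_LABEL set emits

	if (a == 0) goto if_false_label;
	if (b == 0) goto if_false_label;

   and never materializes a 0/1 value for the whole expression; see
   the TRUTH_ANDIF_EXPR case below.  */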
9678 void
9679 do_jump (exp, if_false_label, if_true_label)
9680 tree exp;
9681 rtx if_false_label, if_true_label;
9682 {
9683 enum tree_code code = TREE_CODE (exp);
9684 /* Some cases need to create a label to jump to
9685 in order to properly fall through.
9686 These cases set DROP_THROUGH_LABEL nonzero. */
9687 rtx drop_through_label = 0;
9688 rtx temp;
9689 int i;
9690 tree type;
9691 enum machine_mode mode;
9692
9693 #ifdef MAX_INTEGER_COMPUTATION_MODE
9694 check_max_integer_computation_mode (exp);
9695 #endif
9696
9697 emit_queue ();
9698
9699 switch (code)
9700 {
9701 case ERROR_MARK:
9702 break;
9703
9704 case INTEGER_CST:
9705 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9706 if (temp)
9707 emit_jump (temp);
9708 break;
9709
9710 #if 0
9711 /* This is not true with #pragma weak */
9712 case ADDR_EXPR:
9713 /* The address of something can never be zero. */
9714 if (if_true_label)
9715 emit_jump (if_true_label);
9716 break;
9717 #endif
9718
9719 case NOP_EXPR:
9720 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9721 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9722 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
9723 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
9724 goto normal;
9725 case CONVERT_EXPR:
9726 /* If we are narrowing the operand, we have to do the compare in the
9727 narrower type. */
9728 if ((TYPE_PRECISION (TREE_TYPE (exp))
9729 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9730 goto normal;
9731 case NON_LVALUE_EXPR:
9732 case REFERENCE_EXPR:
9733 case ABS_EXPR:
9734 case NEGATE_EXPR:
9735 case LROTATE_EXPR:
9736 case RROTATE_EXPR:
9737 /* These cannot change zero->nonzero or vice versa. */
9738 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9739 break;
9740
9741 case WITH_RECORD_EXPR:
9742 /* Put the object on the placeholder list, recurse through our first
9743 operand, and pop the list. */
9744 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9745 placeholder_list);
9746 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9747 placeholder_list = TREE_CHAIN (placeholder_list);
9748 break;
9749
9750 #if 0
9751 /* This is never less insns than evaluating the PLUS_EXPR followed by
9752 a test and can be longer if the test is eliminated. */
9753 case PLUS_EXPR:
9754 /* Reduce to minus. */
9755 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9756 TREE_OPERAND (exp, 0),
9757 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9758 TREE_OPERAND (exp, 1))));
9759 /* Process as MINUS. */
9760 #endif
9761
9762 case MINUS_EXPR:
9763 /* Nonzero iff operands of minus differ. */
9764 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9765 TREE_OPERAND (exp, 0),
9766 TREE_OPERAND (exp, 1)),
9767 NE, NE, if_false_label, if_true_label);
9768 break;
9769
9770 case BIT_AND_EXPR:
9771 /* If we are AND'ing with a small constant, do this comparison in the
9772 smallest type that fits. If the machine doesn't have comparisons
9773 that small, it will be converted back to the wider comparison.
9774 This helps if we are testing the sign bit of a narrower object.
9775 combine can't do this for us because it can't know whether a
9776 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
9778 if (! SLOW_BYTE_ACCESS
9779 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9780 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9781 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9782 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9783 && (type = (*lang_hooks.types.type_for_mode) (mode, 1)) != 0
9784 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9785 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9786 != CODE_FOR_nothing))
9787 {
9788 do_jump (convert (type, exp), if_false_label, if_true_label);
9789 break;
9790 }
9791 goto normal;
9792
9793 case TRUTH_NOT_EXPR:
9794 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9795 break;
9796
9797 case TRUTH_ANDIF_EXPR:
9798 if (if_false_label == 0)
9799 if_false_label = drop_through_label = gen_label_rtx ();
9800 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9801 start_cleanup_deferral ();
9802 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9803 end_cleanup_deferral ();
9804 break;
9805
9806 case TRUTH_ORIF_EXPR:
9807 if (if_true_label == 0)
9808 if_true_label = drop_through_label = gen_label_rtx ();
9809 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9810 start_cleanup_deferral ();
9811 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9812 end_cleanup_deferral ();
9813 break;
9814
9815 case COMPOUND_EXPR:
9816 push_temp_slots ();
9817 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9818 preserve_temp_slots (NULL_RTX);
9819 free_temp_slots ();
9820 pop_temp_slots ();
9821 emit_queue ();
9822 do_pending_stack_adjust ();
9823 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9824 break;
9825
9826 case COMPONENT_REF:
9827 case BIT_FIELD_REF:
9828 case ARRAY_REF:
9829 case ARRAY_RANGE_REF:
9830 {
9831 HOST_WIDE_INT bitsize, bitpos;
9832 int unsignedp;
9833 enum machine_mode mode;
9834 tree type;
9835 tree offset;
9836 int volatilep = 0;
9837
9838 /* Get description of this reference. We don't actually care
9839 about the underlying object here. */
9840 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9841 &unsignedp, &volatilep);
9843 type = (*lang_hooks.types.type_for_size) (bitsize, unsignedp);
9844 if (! SLOW_BYTE_ACCESS
9845 && type != 0 && bitsize >= 0
9846 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9847 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9848 != CODE_FOR_nothing))
9849 {
9850 do_jump (convert (type, exp), if_false_label, if_true_label);
9851 break;
9852 }
9853 goto normal;
9854 }
9855
9856 case COND_EXPR:
9857 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9858 if (integer_onep (TREE_OPERAND (exp, 1))
9859 && integer_zerop (TREE_OPERAND (exp, 2)))
9860 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9862 else if (integer_zerop (TREE_OPERAND (exp, 1))
9863 && integer_onep (TREE_OPERAND (exp, 2)))
9864 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9865
9866 else
9867 {
9868 rtx label1 = gen_label_rtx ();
9869 drop_through_label = gen_label_rtx ();
9871 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9873 start_cleanup_deferral ();
9874 /* Now the THEN-expression. */
9875 do_jump (TREE_OPERAND (exp, 1),
9876 if_false_label ? if_false_label : drop_through_label,
9877 if_true_label ? if_true_label : drop_through_label);
9878 /* In case the do_jump just above never jumps. */
9879 do_pending_stack_adjust ();
9880 emit_label (label1);
9882 /* Now the ELSE-expression. */
9883 do_jump (TREE_OPERAND (exp, 2),
9884 if_false_label ? if_false_label : drop_through_label,
9885 if_true_label ? if_true_label : drop_through_label);
9886 end_cleanup_deferral ();
9887 }
9888 break;
9889
9890 case EQ_EXPR:
9891 {
9892 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9894 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9895 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9896 {
9897 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9898 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9899
9900 do_jump
9901 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9902 fold (build (EQ_EXPR, TREE_TYPE (exp),
9903 fold (build1 (REALPART_EXPR,
9904 TREE_TYPE (inner_type),
9905 exp0)),
9906 fold (build1 (REALPART_EXPR,
9907 TREE_TYPE (inner_type),
9908 exp1)))),
9909 fold (build (EQ_EXPR, TREE_TYPE (exp),
9910 fold (build1 (IMAGPART_EXPR,
9911 TREE_TYPE (inner_type),
9912 exp0)),
9913 fold (build1 (IMAGPART_EXPR,
9914 TREE_TYPE (inner_type),
9915 exp1)))))),
9916 if_false_label, if_true_label);
9917 }
9918
9919 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9920 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9922 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9923 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9924 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9925 else
9926 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9927 }
9928 break;
9929
9930 case NE_EXPR:
9931 {
9932 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9934 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9935 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9936 {
9937 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9938 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9939
9940 do_jump
9941 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9942 fold (build (NE_EXPR, TREE_TYPE (exp),
9943 fold (build1 (REALPART_EXPR,
9944 TREE_TYPE (inner_type),
9945 exp0)),
9946 fold (build1 (REALPART_EXPR,
9947 TREE_TYPE (inner_type),
9948 exp1)))),
9949 fold (build (NE_EXPR, TREE_TYPE (exp),
9950 fold (build1 (IMAGPART_EXPR,
9951 TREE_TYPE (inner_type),
9952 exp0)),
9953 fold (build1 (IMAGPART_EXPR,
9954 TREE_TYPE (inner_type),
9955 exp1)))))),
9956 if_false_label, if_true_label);
9957 }
9958
9959 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9960 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9962 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9963 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9964 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9965 else
9966 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9967 }
9968 break;
9969
9970 case LT_EXPR:
9971 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9972 if (GET_MODE_CLASS (mode) == MODE_INT
9973 && ! can_compare_p (LT, mode, ccp_jump))
9974 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9975 else
9976 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9977 break;
9978
9979 case LE_EXPR:
9980 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9981 if (GET_MODE_CLASS (mode) == MODE_INT
9982 && ! can_compare_p (LE, mode, ccp_jump))
9983 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9984 else
9985 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9986 break;
9987
9988 case GT_EXPR:
9989 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9990 if (GET_MODE_CLASS (mode) == MODE_INT
9991 && ! can_compare_p (GT, mode, ccp_jump))
9992 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9993 else
9994 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9995 break;
9996
9997 case GE_EXPR:
9998 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9999 if (GET_MODE_CLASS (mode) == MODE_INT
10000 && ! can_compare_p (GE, mode, ccp_jump))
10001 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
10002 else
10003 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
10004 break;
10005
10006 case UNORDERED_EXPR:
10007 case ORDERED_EXPR:
10008 {
10009 enum rtx_code cmp, rcmp;
10010 int do_rev;
10011
10012 if (code == UNORDERED_EXPR)
10013 cmp = UNORDERED, rcmp = ORDERED;
10014 else
10015 cmp = ORDERED, rcmp = UNORDERED;
10016 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10017
10018 do_rev = 0;
10019 if (! can_compare_p (cmp, mode, ccp_jump)
10020 && (can_compare_p (rcmp, mode, ccp_jump)
10021 /* If the target doesn't provide either UNORDERED or ORDERED
10022 comparisons, canonicalize on UNORDERED for the library. */
10023 || rcmp == UNORDERED))
10024 do_rev = 1;
10025
10026 if (! do_rev)
10027 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
10028 else
10029 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
10030 }
10031 break;
10032
10033 {
10034 enum rtx_code rcode1;
10035 enum tree_code tcode2;
10036
10037 case UNLT_EXPR:
10038 rcode1 = UNLT;
10039 tcode2 = LT_EXPR;
10040 goto unordered_bcc;
10041 case UNLE_EXPR:
10042 rcode1 = UNLE;
10043 tcode2 = LE_EXPR;
10044 goto unordered_bcc;
10045 case UNGT_EXPR:
10046 rcode1 = UNGT;
10047 tcode2 = GT_EXPR;
10048 goto unordered_bcc;
10049 case UNGE_EXPR:
10050 rcode1 = UNGE;
10051 tcode2 = GE_EXPR;
10052 goto unordered_bcc;
10053 case UNEQ_EXPR:
10054 rcode1 = UNEQ;
10055 tcode2 = EQ_EXPR;
10056 goto unordered_bcc;
10057
10058 unordered_bcc:
10059 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10060 if (can_compare_p (rcode1, mode, ccp_jump))
10061 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
10062 if_true_label);
10063 else
10064 {
10065 tree op0 = save_expr (TREE_OPERAND (exp, 0));
10066 tree op1 = save_expr (TREE_OPERAND (exp, 1));
10067 tree cmp0, cmp1;
10068
10069 /* If the target doesn't support combined unordered
10070 compares, decompose into UNORDERED + comparison. */
10071 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
10072 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
10073 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
10074 do_jump (exp, if_false_label, if_true_label);
10075 }
10076 }
10077 break;
10078
10079 /* Special case:
10080 __builtin_expect (<test>, 0) and
10081 __builtin_expect (<test>, 1)
10083 We need to do this here, so that <test> is not converted to a SCC
10084 operation on machines that use condition code registers and COMPARE
10085 like the PowerPC, and then the jump is done based on whether the SCC
10086 operation produced a 1 or 0. */
10087 case CALL_EXPR:
10088 /* Check for a built-in function. */
10089 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
10090 {
10091 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
10092 tree arglist = TREE_OPERAND (exp, 1);
10094 if (TREE_CODE (fndecl) == FUNCTION_DECL
10095 && DECL_BUILT_IN (fndecl)
10096 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
10097 && arglist != NULL_TREE
10098 && TREE_CHAIN (arglist) != NULL_TREE)
10099 {
10100 rtx seq = expand_builtin_expect_jump (exp, if_false_label,
10101 if_true_label);
10102
10103 if (seq != NULL_RTX)
10104 {
10105 emit_insn (seq);
10106 return;
10107 }
10108 }
10109
10110 /* fall through and generate the normal code. */
10111
10112 default:
10113 normal:
10114 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
10115 #if 0
10116 /* This is not needed any more and causes poor code since it causes
10117 comparisons and tests from non-SI objects to have different code
10118 sequences. */
10119 /* Copy to register to avoid generating bad insns by cse
10120 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
10121 if (!cse_not_expected && GET_CODE (temp) == MEM)
10122 temp = copy_to_reg (temp);
10123 #endif
10124 do_pending_stack_adjust ();
10125 /* Do any postincrements in the expression that was tested. */
10126 emit_queue ();
10127
10128 if (GET_CODE (temp) == CONST_INT
10129 || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
10130 || GET_CODE (temp) == LABEL_REF)
10131 {
10132 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
10133 if (target)
10134 emit_jump (target);
10135 }
10136 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
10137 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
10138 /* Note swapping the labels gives us not-equal. */
10139 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
10140 else if (GET_MODE (temp) != VOIDmode)
10141 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
10142 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
10143 GET_MODE (temp), NULL_RTX,
10144 if_false_label, if_true_label);
10145 else
10146 abort ();
10147 break;
10148 }
10149 if (drop_through_label)
10151 /* If do_jump produces code that might be jumped around,
10152 do any stack adjusts from that code, before the place
10153 where control merges in. */
10154 do_pending_stack_adjust ();
10155 emit_label (drop_through_label);
10156 }
10157
10159 /* Given a comparison expression EXP for values too wide to be compared
10160 with one insn, test the comparison and jump to the appropriate label.
10161 The code of EXP is ignored; we always test GT if SWAP is 0,
10162 and LT if SWAP is 1. */
10164 static void
10165 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
10166 tree exp;
10167 int swap;
10168 rtx if_false_label, if_true_label;
10169 {
10170 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
10171 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
10172 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10173 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
10175 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
10178 /* Compare OP0 with OP1, word at a time, in mode MODE.
10179 UNSIGNEDP says to do unsigned comparison.
10180 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
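/* As an illustration, for a DImode comparison on a 32-bit target the
   loop below emits roughly (a sketch, not literal RTL):
     if (op0.high > op1.high) goto if_true_label;
     if (op0.high != op1.high) goto if_false_label;
     if (op0.low > op1.low) goto if_true_label;   (unsigned compare)
     goto if_false_label;  */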
10182 static void
10183 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
10184 enum machine_mode mode;
10185 int unsignedp;
10186 rtx op0, op1;
10187 rtx if_false_label, if_true_label;
10188 {
10189 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10190 rtx drop_through_label = 0;
10191 int i;
10193 if (! if_true_label || ! if_false_label)
10194 drop_through_label = gen_label_rtx ();
10195 if (! if_true_label)
10196 if_true_label = drop_through_label;
10197 if (! if_false_label)
10198 if_false_label = drop_through_label;
10200 /* Compare a word at a time, high order first. */
10201 for (i = 0; i < nwords; i++)
10203 rtx op0_word, op1_word;
10205 if (WORDS_BIG_ENDIAN)
10206 {
10207 op0_word = operand_subword_force (op0, i, mode);
10208 op1_word = operand_subword_force (op1, i, mode);
10209 }
10210 else
10211 {
10212 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10213 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10214 }
10216 /* All but the high-order word must be compared as unsigned.  */
10217 do_compare_rtx_and_jump (op0_word, op1_word, GT,
10218 (unsignedp || i > 0), word_mode, NULL_RTX,
10219 NULL_RTX, if_true_label);
10221 /* Consider lower words only if these are equal. */
10222 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
10223 NULL_RTX, NULL_RTX, if_false_label);
10226 if (if_false_label)
10227 emit_jump (if_false_label);
10228 if (drop_through_label)
10229 emit_label (drop_through_label);
10232 /* Given an EQ_EXPR expression EXP for values too wide to be compared
10233 with one insn, test the comparison and jump to the appropriate label. */
10235 static void
10236 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
10237 tree exp;
10238 rtx if_false_label, if_true_label;
10239 {
10240 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10241 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10242 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10243 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10244 int i;
10245 rtx drop_through_label = 0;
10247 if (! if_false_label)
10248 drop_through_label = if_false_label = gen_label_rtx ();
10250 for (i = 0; i < nwords; i++)
10251 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
10252 operand_subword_force (op1, i, mode),
10253 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
10254 word_mode, NULL_RTX, if_false_label, NULL_RTX);
10256 if (if_true_label)
10257 emit_jump (if_true_label);
10258 if (drop_through_label)
10259 emit_label (drop_through_label);
10262 /* Jump according to whether OP0 is 0.
10263 We assume that OP0 has an integer mode that is too wide
10264 for the available compare insns. */
10266 static void
10267 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
10268 rtx op0;
10269 rtx if_false_label, if_true_label;
10270 {
10271 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
10272 rtx part;
10273 int i;
10274 rtx drop_through_label = 0;
10276 /* The fastest way of doing this comparison on almost any machine is to
10277 "or" all the words and compare the result. If all have to be loaded
10278 from memory and this is a very wide item, it's possible this may
10279 be slower, but that's highly unlikely. */
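/* E.g., for a DImode OP0 on a 32-bit target this computes roughly
     part = op0.word[0] | op0.word[1];
   so that a single compare of PART against zero decides the jump.  */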
10281 part = gen_reg_rtx (word_mode);
10282 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
10283 for (i = 1; i < nwords && part != 0; i++)
10284 part = expand_binop (word_mode, ior_optab, part,
10285 operand_subword_force (op0, i, GET_MODE (op0)),
10286 part, 1, OPTAB_WIDEN);
10288 if (part != 0)
10289 {
10290 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
10291 NULL_RTX, if_false_label, if_true_label);
10292 return;
10293 }
10296 /* If we couldn't do the "or" simply, do this with a series of compares. */
10297 if (! if_false_label)
10298 drop_through_label = if_false_label = gen_label_rtx ();
10300 for (i = 0; i < nwords; i++)
10301 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
10302 const0_rtx, EQ, 1, word_mode, NULL_RTX,
10303 if_false_label, NULL_RTX);
10305 if (if_true_label)
10306 emit_jump (if_true_label);
10308 if (drop_through_label)
10309 emit_label (drop_through_label);
10312 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE
10313 (including code to compute the values to be compared) and set (CC0)
10314 according to the result.
10315 The decision as to signed or unsigned comparison must be made by the caller.
10317 We force a stack adjustment unless there are currently
10318 things pushed on the stack that aren't yet used.
10320 If MODE is BLKmode, SIZE is an RTX giving the size of the objects
10321 being compared.  */
10323 rtx
10324 compare_from_rtx (op0, op1, code, unsignedp, mode, size)
10325 rtx op0, op1;
10326 enum rtx_code code;
10327 int unsignedp;
10328 enum machine_mode mode;
10329 rtx size;
10330 {
10331 enum rtx_code ucode;
10332 rtx tem;
10334 /* If one operand is constant, make it the second one. Only do this
10335 if the other operand is not constant as well. */
10337 if (swap_commutative_operands_p (op0, op1))
10338 {
10339 tem = op1;
10340 op1 = op0;
10341 op0 = tem;
10342 code = swap_condition (code);
10343 }
10345 if (flag_force_mem)
10346 {
10347 op0 = force_not_mem (op0);
10348 op1 = force_not_mem (op1);
10349 }
10351 do_pending_stack_adjust ();
10353 ucode = unsignedp ? unsigned_condition (code) : code;
10354 if ((tem = simplify_relational_operation (ucode, mode, op0, op1)) != 0)
10355 return tem;
10357 #if 0
10358 /* There's no need to do this now that combine.c can eliminate lots of
10359 sign extensions.  This can be less efficient in certain cases on other
10360 machines.  */
10362 /* If this is a signed equality comparison, we can do it as an
10363 unsigned comparison since zero-extension is cheaper than sign
10364 extension and comparisons with zero are done as unsigned. This is
10365 the case even on machines that can do fast sign extension, since
10366 zero-extension is easier to combine with other operations than
10367 sign-extension is. If we are comparing against a constant, we must
10368 convert it to what it would look like unsigned. */
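/* E.g., a signed QImode test  x == -1  would become the unsigned test
   x == 0xff, since (-1 & GET_MODE_MASK (QImode)) is 0xff.  */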
10369 if ((code == EQ || code == NE) && ! unsignedp
10370 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10371 {
10372 if (GET_CODE (op1) == CONST_INT
10373 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10374 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10375 unsignedp = 1;
10376 }
10377 #endif
10379 emit_cmp_insn (op0, op1, code, size, mode, unsignedp);
10381 #if HAVE_cc0
10382 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
10383 #else
10384 return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
10385 #endif
10386 }
10388 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
10389 The decision as to signed or unsigned comparison must be made by the caller.
10391 If MODE is BLKmode, SIZE is an RTX giving the size of the objects
10392 being compared.  */
10394 void
10395 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size,
10396 if_false_label, if_true_label)
10397 rtx op0, op1;
10398 enum rtx_code code;
10399 int unsignedp;
10400 enum machine_mode mode;
10401 rtx size;
10402 rtx if_false_label, if_true_label;
10403 {
10404 enum rtx_code ucode;
10405 rtx tem;
10406 int dummy_true_label = 0;
10408 /* Reverse the comparison if that is safe and we want to jump if it is
10409 false.  */
10410 if (! if_true_label && ! FLOAT_MODE_P (mode))
10411 {
10412 if_true_label = if_false_label;
10413 if_false_label = 0;
10414 code = reverse_condition (code);
10415 }
10417 /* If one operand is constant, make it the second one. Only do this
10418 if the other operand is not constant as well. */
10420 if (swap_commutative_operands_p (op0, op1))
10421 {
10422 tem = op1;
10423 op1 = op0;
10424 op0 = tem;
10425 code = swap_condition (code);
10426 }
10428 if (flag_force_mem)
10429 {
10430 op0 = force_not_mem (op0);
10431 op1 = force_not_mem (op1);
10432 }
10434 do_pending_stack_adjust ();
10436 ucode = unsignedp ? unsigned_condition (code) : code;
10437 if ((tem = simplify_relational_operation (ucode, mode, op0, op1)) != 0)
10438 {
10439 if (tem == const_true_rtx)
10440 {
10441 if (if_true_label)
10442 emit_jump (if_true_label);
10443 }
10444 else
10445 {
10446 if (if_false_label)
10447 emit_jump (if_false_label);
10448 }
10450 return;
10451 }
10452 #if 0
10453 /* There's no need to do this now that combine.c can eliminate lots of
10454 sign extensions.  This can be less efficient in certain cases on other
10455 machines.  */
10457 /* If this is a signed equality comparison, we can do it as an
10458 unsigned comparison since zero-extension is cheaper than sign
10459 extension and comparisons with zero are done as unsigned. This is
10460 the case even on machines that can do fast sign extension, since
10461 zero-extension is easier to combine with other operations than
10462 sign-extension is. If we are comparing against a constant, we must
10463 convert it to what it would look like unsigned. */
10464 if ((code == EQ || code == NE) && ! unsignedp
10465 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10466 {
10467 if (GET_CODE (op1) == CONST_INT
10468 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10469 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10470 unsignedp = 1;
10471 }
10472 #endif
10474 if (! if_true_label)
10475 {
10476 dummy_true_label = 1;
10477 if_true_label = gen_label_rtx ();
10478 }
10480 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
10481 if_true_label);
10483 if (if_false_label)
10484 emit_jump (if_false_label);
10485 if (dummy_true_label)
10486 emit_label (if_true_label);
10489 /* Generate code for a comparison expression EXP (including code to compute
10490 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
10491 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
10492 generated code will drop through.
10493 SIGNED_CODE should be the rtx operation for this comparison for
10494 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10496 We force a stack adjustment unless there are currently
10497 things pushed on the stack that aren't yet used. */
10499 static void
10500 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
10501 if_true_label)
10502 tree exp;
10503 enum rtx_code signed_code, unsigned_code;
10504 rtx if_false_label, if_true_label;
10505 {
10506 rtx op0, op1;
10507 tree type;
10508 enum machine_mode mode;
10509 int unsignedp;
10510 enum rtx_code code;
10512 /* Don't crash if the comparison was erroneous. */
10513 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10514 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
10515 return;
10517 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10518 if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
10519 return;
10521 type = TREE_TYPE (TREE_OPERAND (exp, 0));
10522 mode = TYPE_MODE (type);
10523 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
10524 && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
10525 || (GET_MODE_BITSIZE (mode)
10526 > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
10527 1)))))))
10528 {
10529 /* op0 might have been replaced by a promoted constant, in which
10530 case the type of the second argument should be used.  */
10531 type = TREE_TYPE (TREE_OPERAND (exp, 1));
10532 mode = TYPE_MODE (type);
10533 }
10534 unsignedp = TREE_UNSIGNED (type);
10535 code = unsignedp ? unsigned_code : signed_code;
10537 #ifdef HAVE_canonicalize_funcptr_for_compare
10538 /* If function pointers need to be "canonicalized" before they can
10539 be reliably compared, then canonicalize them. */
10540 if (HAVE_canonicalize_funcptr_for_compare
10541 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10542 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10543 == FUNCTION_TYPE))
10544 {
10545 rtx new_op0 = gen_reg_rtx (mode);
10547 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10548 op0 = new_op0;
10549 }
10551 if (HAVE_canonicalize_funcptr_for_compare
10552 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10553 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10554 == FUNCTION_TYPE))
10555 {
10556 rtx new_op1 = gen_reg_rtx (mode);
10558 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10559 op1 = new_op1;
10560 }
10561 #endif
10563 /* Do any postincrements in the expression that was tested.  */
10564 emit_queue ();
10566 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
10567 ((mode == BLKmode)
10568 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10569 if_false_label, if_true_label);
10572 /* Generate code to calculate EXP using a store-flag instruction
10573 and return an rtx for the result. EXP is either a comparison
10574 or a TRUTH_NOT_EXPR whose operand is a comparison.
10576 If TARGET is nonzero, store the result there if convenient.
10578 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
10579 cheap.
10581 Return zero if there is no suitable set-flag instruction
10582 available on this machine.
10584 Once expand_expr has been called on the arguments of the comparison,
10585 we are committed to doing the store flag, since it is not safe to
10586 re-evaluate the expression. We emit the store-flag insn by calling
10587 emit_store_flag, but only expand the arguments if we have a reason
10588 to believe that emit_store_flag will be successful. If we think that
10589 it will, but it isn't, we have to simulate the store-flag with a
10590 set/jump/set sequence. */
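/* The simulated sequence is, in outline:
     target = 1;  if (cond) goto L;  target = 0;  L:
   with the two constants exchanged when the result must be inverted.  */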
10592 static rtx
10593 do_store_flag (exp, target, mode, only_cheap)
10594 tree exp;
10595 rtx target;
10596 enum machine_mode mode;
10597 int only_cheap;
10598 {
10599 enum rtx_code code;
10600 tree arg0, arg1, type;
10601 tree tem;
10602 enum machine_mode operand_mode;
10603 int invert = 0;
10604 int unsignedp;
10605 rtx op0, op1;
10606 enum insn_code icode;
10607 rtx subtarget = target;
10608 rtx result, label;
10610 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10611 result at the end. We can't simply invert the test since it would
10612 have already been inverted if it were valid. This case occurs for
10613 some floating-point comparisons. */
10615 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10616 invert = 1, exp = TREE_OPERAND (exp, 0);
10618 arg0 = TREE_OPERAND (exp, 0);
10619 arg1 = TREE_OPERAND (exp, 1);
10621 /* Don't crash if the comparison was erroneous. */
10622 if (arg0 == error_mark_node || arg1 == error_mark_node)
10623 return const0_rtx;
10625 type = TREE_TYPE (arg0);
10626 operand_mode = TYPE_MODE (type);
10627 unsignedp = TREE_UNSIGNED (type);
10629 /* We won't bother with BLKmode store-flag operations because it would mean
10630 passing a lot of information to emit_store_flag. */
10631 if (operand_mode == BLKmode)
10632 return 0;
10634 /* We won't bother with store-flag operations involving function pointers
10635 when function pointers must be canonicalized before comparisons. */
10636 #ifdef HAVE_canonicalize_funcptr_for_compare
10637 if (HAVE_canonicalize_funcptr_for_compare
10638 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10639 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10640 == FUNCTION_TYPE)
10641 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10642 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10643 == FUNCTION_TYPE))))
10644 return 0;
10645 #endif
10650 /* Get the rtx comparison code to use. We know that EXP is a comparison
10651 operation of some type. Some comparisons against 1 and -1 can be
10652 converted to comparisons with zero. Do so here so that the tests
10653 below will be aware that we have a comparison with zero. These
10654 tests will not catch constants in the first operand, but constants
10655 are rarely passed as the first operand. */
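/* E.g., a signed  x < 1  is canonicalized below to  x <= 0, and a
   signed  x >= 1  to  x > 0, so only comparisons against zero need to
   be recognized by the later tests.  */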
10657 switch (TREE_CODE (exp))
10658 {
10659 case EQ_EXPR:
10660 code = EQ;
10661 break;
10662 case NE_EXPR:
10663 code = NE;
10664 break;
10665 case LT_EXPR:
10666 if (integer_onep (arg1))
10667 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10668 else
10669 code = unsignedp ? LTU : LT;
10670 break;
10671 case LE_EXPR:
10672 if (! unsignedp && integer_all_onesp (arg1))
10673 arg1 = integer_zero_node, code = LT;
10674 else
10675 code = unsignedp ? LEU : LE;
10676 break;
10677 case GT_EXPR:
10678 if (! unsignedp && integer_all_onesp (arg1))
10679 arg1 = integer_zero_node, code = GE;
10680 else
10681 code = unsignedp ? GTU : GT;
10682 break;
10683 case GE_EXPR:
10684 if (integer_onep (arg1))
10685 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10686 else
10687 code = unsignedp ? GEU : GE;
10688 break;
10690 case UNORDERED_EXPR:
10716 /* Put a constant second. */
10717 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10718 {
10719 tem = arg0; arg0 = arg1; arg1 = tem;
10720 code = swap_condition (code);
10721 }
10723 /* If this is an equality or inequality test of a single bit, we can
10724 do this by shifting the bit being tested to the low-order bit and
10725 masking the result with the constant 1. If the condition was EQ,
10726 we xor it with 1. This does not require an scc insn and is faster
10727 than an scc insn even if we have it. */
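/* E.g., (x & 8) != 0 becomes (x >> 3) & 1, and (x & 8) == 0 becomes
   ((x >> 3) & 1) ^ 1.  */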
10729 if ((code == NE || code == EQ)
10730 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10731 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10733 tree inner = TREE_OPERAND (arg0, 0);
10734 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10735 int ops_unsignedp;
10737 /* If INNER is a right shift of a constant and it plus BITNUM does
10738 not overflow, adjust BITNUM and INNER. */
10740 if (TREE_CODE (inner) == RSHIFT_EXPR
10741 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10742 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10743 && bitnum < TYPE_PRECISION (type)
10744 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10745 bitnum - TYPE_PRECISION (type)))
10746 {
10747 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10748 inner = TREE_OPERAND (inner, 0);
10749 }
10751 /* If we are going to be able to omit the AND below, we must do our
10752 operations as unsigned. If we must use the AND, we have a choice.
10753 Normally unsigned is faster, but for some machines signed is. */
10754 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10755 #ifdef LOAD_EXTEND_OP
10756 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10757 #else
10758 : 1
10759 #endif
10760 );
10762 if (! get_subtarget (subtarget)
10763 || GET_MODE (subtarget) != operand_mode
10764 || ! safe_from_p (subtarget, inner, 1))
10765 subtarget = 0;
10767 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10769 if (bitnum != 0)
10770 op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
10771 size_int (bitnum), subtarget, ops_unsignedp);
10773 if (GET_MODE (op0) != mode)
10774 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10776 if ((code == EQ && ! invert) || (code == NE && invert))
10777 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10778 ops_unsignedp, OPTAB_LIB_WIDEN);
10780 /* Put the AND last so it can combine with more things. */
10781 if (bitnum != TYPE_PRECISION (type) - 1)
10782 op0 = expand_and (mode, op0, const1_rtx, subtarget);
10784 return op0;
10785 }
10787 /* Now see if we are likely to be able to do this. Return if not. */
10788 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10789 return 0;
10791 icode = setcc_gen_code[(int) code];
10792 if (icode == CODE_FOR_nothing
10793 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10794 {
10795 /* We can only do this if it is one of the special cases that
10796 can be handled without an scc insn. */
10797 if ((code == LT && integer_zerop (arg1))
10798 || (! only_cheap && code == GE && integer_zerop (arg1)))
10799 ;
10800 else if (BRANCH_COST >= 0
10801 && ! only_cheap && (code == NE || code == EQ)
10802 && TREE_CODE (type) != REAL_TYPE
10803 && ((abs_optab->handlers[(int) operand_mode].insn_code
10804 != CODE_FOR_nothing)
10805 || (ffs_optab->handlers[(int) operand_mode].insn_code
10806 != CODE_FOR_nothing)))
10807 ;
10808 else
10809 return 0;
10810 }
10812 if (! get_subtarget (target)
10813 || GET_MODE (subtarget) != operand_mode
10814 || ! safe_from_p (subtarget, arg1, 1))
10815 subtarget = 0;
10817 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10818 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10820 if (target == 0)
10821 target = gen_reg_rtx (mode);
10823 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10824 because, if the emit_store_flag does anything it will succeed and
10825 OP0 and OP1 will not be used subsequently. */
10827 result = emit_store_flag (target, code,
10828 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10829 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10830 operand_mode, unsignedp, 1);
10832 if (result)
10833 {
10834 if (invert)
10835 result = expand_binop (mode, xor_optab, result, const1_rtx,
10836 result, 0, OPTAB_LIB_WIDEN);
10838 return result;
10839 }
10840 /* If this failed, we have to do this with set/compare/jump/set code. */
10841 if (GET_CODE (target) != REG
10842 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10843 target = gen_reg_rtx (GET_MODE (target));
10845 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10846 result = compare_from_rtx (op0, op1, code, unsignedp,
10847 operand_mode, NULL_RTX);
10848 if (GET_CODE (result) == CONST_INT)
10849 return (((result == const0_rtx && ! invert)
10850 || (result != const0_rtx && invert))
10851 ? const0_rtx : const1_rtx);
10853 /* The code of RESULT may not match CODE if compare_from_rtx
10854 decided to swap its operands and reverse the original code.
10856 We know that compare_from_rtx returns either a CONST_INT or
10857 a new comparison code, so it is safe to just extract the
10858 code from RESULT. */
10859 code = GET_CODE (result);
10861 label = gen_label_rtx ();
10862 if (bcc_gen_fctn[(int) code] == 0)
10863 abort ();
10865 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10866 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10867 emit_label (label);
10869 return target;
10870 }
10873 /* Stubs in case we haven't got a casesi insn. */
10874 #ifndef HAVE_casesi
10875 # define HAVE_casesi 0
10876 # define gen_casesi(a, b, c, d, e) (0)
10877 # define CODE_FOR_casesi CODE_FOR_nothing
10878 #endif
10880 /* If the machine does not have a case insn that compares the bounds,
10881 this means extra overhead for dispatch tables, which raises the
10882 threshold for using them. */
10883 #ifndef CASE_VALUES_THRESHOLD
10884 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
10885 #endif /* CASE_VALUES_THRESHOLD */
10887 unsigned int
10888 case_values_threshold ()
10889 {
10890 return CASE_VALUES_THRESHOLD;
10891 }
10893 /* Attempt to generate a casesi instruction. Returns 1 if successful,
10894 0 otherwise (i.e. if there is no casesi instruction). */
10895 int
10896 try_casesi (index_type, index_expr, minval, range,
10897 table_label, default_label)
10898 tree index_type, index_expr, minval, range;
10899 rtx table_label ATTRIBUTE_UNUSED;
10900 rtx default_label;
10901 {
10902 enum machine_mode index_mode = SImode;
10903 int index_bits = GET_MODE_BITSIZE (index_mode);
10904 rtx op1, op2, index;
10905 enum machine_mode op_mode;
10907 if (! HAVE_casesi)
10908 return 0;
10910 /* Convert the index to SImode. */
10911 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
10912 {
10913 enum machine_mode omode = TYPE_MODE (index_type);
10914 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
10916 /* We must handle the endpoints in the original mode. */
10917 index_expr = build (MINUS_EXPR, index_type,
10918 index_expr, minval);
10919 minval = integer_zero_node;
10920 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10921 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
10922 omode, 1, default_label);
10923 /* Now we can safely truncate. */
10924 index = convert_to_mode (index_mode, index, 0);
10925 }
10926 else
10927 {
10928 if (TYPE_MODE (index_type) != index_mode)
10929 {
10930 index_expr = convert ((*lang_hooks.types.type_for_size)
10931 (index_bits, 0), index_expr);
10932 index_type = TREE_TYPE (index_expr);
10933 }
10935 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10936 }
10937 emit_queue ();
10938 index = protect_from_queue (index, 0);
10939 do_pending_stack_adjust ();
10941 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
10942 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
10943 (index, op_mode))
10944 index = copy_to_mode_reg (op_mode, index);
10946 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
10948 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
10949 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
10950 op1, TREE_UNSIGNED (TREE_TYPE (minval)));
10951 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
10952 (op1, op_mode))
10953 op1 = copy_to_mode_reg (op_mode, op1);
10955 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
10957 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
10958 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
10959 op2, TREE_UNSIGNED (TREE_TYPE (range)));
10960 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
10961 (op2, op_mode))
10962 op2 = copy_to_mode_reg (op_mode, op2);
10964 emit_jump_insn (gen_casesi (index, op1, op2,
10965 table_label, default_label));
10966 return 1;
10967 }
10969 /* Attempt to generate a tablejump instruction; same concept. */
10970 #ifndef HAVE_tablejump
10971 #define HAVE_tablejump 0
10972 #define gen_tablejump(x, y) (0)
10973 #endif
10975 /* Subroutine of the next function.
10977 INDEX is the value being switched on, with the lowest value
10978 in the table already subtracted.
10979 MODE is its expected mode (needed if INDEX is constant).
10980 RANGE is the length of the jump table.
10981 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10983 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10984 index value is out of range. */
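/* In outline, the sequence emitted here is roughly:
     if ((unsigned) index > range) goto default_label;
     temp = table_label[index];  goto *temp;  */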
10986 static void
10987 do_tablejump (index, mode, range, table_label, default_label)
10988 rtx index, range, table_label, default_label;
10989 enum machine_mode mode;
10990 {
10991 rtx temp, vector;
10993 if (INTVAL (range) > cfun->max_jumptable_ents)
10994 cfun->max_jumptable_ents = INTVAL (range);
10996 /* Do an unsigned comparison (in the proper mode) between the index
10997 expression and the value which represents the length of the range.
10998 Since we just finished subtracting the lower bound of the range
10999 from the index expression, this comparison allows us to simultaneously
11000 check that the original index expression value is both greater than
11001 or equal to the minimum value of the range and less than or equal to
11002 the maximum value of the range. */
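/* E.g., with case bounds 5 and 10, RANGE is 5 and INDEX has already had
   5 subtracted: an original value of 3 yields (unsigned) -2, which
   exceeds 5 and jumps to DEFAULT_LABEL, just as an original value of 12
   (now 7) does.  */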
11004 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
11005 default_label);
11007 /* If index is in range, it must fit in Pmode.
11008 Convert to Pmode so we can index with it. */
11010 index = convert_to_mode (Pmode, index, 1);
11012 /* Don't let a MEM slip through, because then INDEX that comes
11013 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
11014 and break_out_memory_refs will go to work on it and mess it up. */
11015 #ifdef PIC_CASE_VECTOR_ADDRESS
11016 if (flag_pic && GET_CODE (index) != REG)
11017 index = copy_to_mode_reg (Pmode, index);
11018 #endif
11020 /* If flag_force_addr were to affect this address
11021 it could interfere with the tricky assumptions made
11022 about addresses that contain label-refs,
11023 which may be valid only very near the tablejump itself. */
11024 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
11025 GET_MODE_SIZE, because this indicates how large insns are. The other
11026 uses should all be Pmode, because they are addresses. This code
11027 could fail if addresses and insns are not the same size. */
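/* I.e., the address formed below is roughly
     table_label + index * GET_MODE_SIZE (CASE_VECTOR_MODE),
   a scaled index into the dispatch table.  */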
11028 index = gen_rtx_PLUS (Pmode,
11029 gen_rtx_MULT (Pmode, index,
11030 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
11031 gen_rtx_LABEL_REF (Pmode, table_label));
11032 #ifdef PIC_CASE_VECTOR_ADDRESS
11033 if (flag_pic)
11034 index = PIC_CASE_VECTOR_ADDRESS (index);
11035 else
11036 #endif
11037 index = memory_address_noforce (CASE_VECTOR_MODE, index);
11038 temp = gen_reg_rtx (CASE_VECTOR_MODE);
11039 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
11040 RTX_UNCHANGING_P (vector) = 1;
11041 convert_move (temp, vector, 0);
11043 emit_jump_insn (gen_tablejump (temp, table_label));
11045 /* If we are generating PIC code or if the table is PC-relative, the
11046 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
11047 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
11048 emit_barrier ();
11049 }
11051 int
11052 try_tablejump (index_type, index_expr, minval, range,
11053 table_label, default_label)
11054 tree index_type, index_expr, minval, range;
11055 rtx table_label, default_label;
11056 {
11057 rtx index;
11059 if (! HAVE_tablejump)
11060 return 0;
11062 index_expr = fold (build (MINUS_EXPR, index_type,
11063 convert (index_type, index_expr),
11064 convert (index_type, minval)));
11065 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
11066 emit_queue ();
11067 index = protect_from_queue (index, 0);
11068 do_pending_stack_adjust ();
11070 do_tablejump (index, TYPE_MODE (index_type),
11071 convert_modes (TYPE_MODE (index_type),
11072 TYPE_MODE (TREE_TYPE (range)),
11073 expand_expr (range, NULL_RTX,
11074 VOIDmode, 0),
11075 TREE_UNSIGNED (TREE_TYPE (range))),
11076 table_label, default_label);
11077 return 1;
11078 }
11080 /* Nonzero if the mode is a valid vector mode for this architecture.
11081 This returns nonzero even if there is no hardware support for the
11082 vector mode, but we can emulate with narrower modes. */
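/* E.g., V2DImode is accepted here whenever DImode moves are available,
   since a V2DImode move can be emulated by a pair of DImode moves.  */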
11084 int
11085 vector_mode_valid_p (mode)
11086 enum machine_mode mode;
11087 {
11088 enum mode_class class = GET_MODE_CLASS (mode);
11089 enum machine_mode innermode;
11091 /* Doh! What's going on? */
11092 if (class != MODE_VECTOR_INT
11093 && class != MODE_VECTOR_FLOAT)
11094 return 0;
11096 /* Hardware support. Woo hoo! */
11097 if (VECTOR_MODE_SUPPORTED_P (mode))
11098 return 1;
11100 innermode = GET_MODE_INNER (mode);
11102 /* We should probably return 1 if requesting V4DI and we have no DI,
11103 but do have V2DI; that case, however, is probably very rare.  */
11105 /* If we have support for the inner mode, we can safely emulate it.
11106 We may not have V2DI, but we can emulate it with a pair of DIs.  */
11107 return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
11108 }
11110 #include "gt-expr.h"