/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "typeclass.h"
#include "langhooks.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif

/* Convert defined/undefined to boolean.  */
#ifdef TARGET_MEM_FUNCTIONS
#undef TARGET_MEM_FUNCTIONS
#define TARGET_MEM_FUNCTIONS 1
#else
#define TARGET_MEM_FUNCTIONS 0
#endif
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Chain of pending expressions for PLACEHOLDER_EXPR to replace.  */
static tree placeholder_list = 0;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to, to_addr, from, from_addr;
  int autinc_to, explicit_inc_to, autinc_from, explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the store to
   be performed.  */
struct store_by_pieces
{
  rtx to, to_addr;
  int autinc_to, explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
  PTR constfundata;
  int reverse;
};
static rtx enqueue_insn		PARAMS ((rtx, rtx));
static unsigned HOST_WIDE_INT move_by_pieces_ninsns
				PARAMS ((unsigned HOST_WIDE_INT,
					 unsigned int));
static void move_by_pieces_1	PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
					 struct move_by_pieces *));
static bool block_move_libcall_safe_for_call_parm PARAMS ((void));
static bool emit_block_move_via_movstr PARAMS ((rtx, rtx, rtx, unsigned));
static rtx emit_block_move_via_libcall PARAMS ((rtx, rtx, rtx));
static tree emit_block_move_libcall_fn PARAMS ((int));
static void emit_block_move_via_loop PARAMS ((rtx, rtx, rtx, unsigned));
static rtx clear_by_pieces_1	PARAMS ((PTR, HOST_WIDE_INT,
					 enum machine_mode));
static void clear_by_pieces	PARAMS ((rtx, unsigned HOST_WIDE_INT,
					 unsigned int));
static void store_by_pieces_1	PARAMS ((struct store_by_pieces *,
					 unsigned int));
static void store_by_pieces_2	PARAMS ((rtx (*) (rtx, ...),
					 enum machine_mode,
					 struct store_by_pieces *));
static bool clear_storage_via_clrstr PARAMS ((rtx, rtx, unsigned));
static rtx clear_storage_via_libcall PARAMS ((rtx, rtx));
static tree clear_storage_libcall_fn PARAMS ((int));
static rtx compress_float_constant PARAMS ((rtx, rtx));
static rtx get_subtarget	PARAMS ((rtx));
static int is_zeros_p		PARAMS ((tree));
static int mostly_zeros_p	PARAMS ((tree));
static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
					     HOST_WIDE_INT, enum machine_mode,
					     tree, tree, int, int));
static void store_constructor	PARAMS ((tree, rtx, int, HOST_WIDE_INT));
static rtx store_field		PARAMS ((rtx, HOST_WIDE_INT,
					 HOST_WIDE_INT, enum machine_mode,
					 tree, enum machine_mode, int, tree,
					 int));
static rtx var_rtx		PARAMS ((tree));
static HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
static HOST_WIDE_INT highest_pow2_factor_for_type PARAMS ((tree, tree));
static int is_aligning_offset	PARAMS ((tree, tree));
static rtx expand_increment	PARAMS ((tree, int, int));
static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
static void do_compare_and_jump	PARAMS ((tree, enum rtx_code, enum rtx_code,
					 rtx, rtx));
static rtx do_store_flag	PARAMS ((tree, rtx, enum machine_mode, int));
#ifdef PUSH_ROUNDING
static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
#endif
static void do_tablejump	PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
/* If a memory-to-memory move would take MOVE_RATIO or more simple
   move-instruction sequences, we will do a movstr or libcall instead.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)
#endif
#endif

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
#endif
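/* Worked example (hypothetical 32-bit target with MOVE_MAX == 4 and the
   default MOVE_RATIO of 15): for a 16-byte word-aligned copy,
   move_by_pieces_ninsns (16, 32) counts four SImode moves, so
   MOVE_BY_PIECES_P (16, 32) is true and the copy is expanded inline
   rather than via movstr or a libcall.  */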
/* If a clear memory operation would take CLEAR_RATIO or more simple
   move-instruction sequences, we will do a clrstr or libcall instead.  */

#ifndef CLEAR_RATIO
#if defined (HAVE_clrstrqi) || defined (HAVE_clrstrhi) || defined (HAVE_clrstrsi) || defined (HAVE_clrstrdi) || defined (HAVE_clrstrti)
#define CLEAR_RATIO 2
#else
/* If we are optimizing for space, cut down the default clear ratio.  */
#define CLEAR_RATIO (optimize_size ? 3 : 15)
#endif
#endif

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero, or
   to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
#endif

/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  rtx mem, mem1, reg;
  int num_clobbers;
  unsigned int regno;
  enum machine_mode mode;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
	   srcmode = GET_MODE_WIDER_MODE (srcmode))
	{
	  enum insn_code ic;

	  ic = can_extend_p (mode, srcmode, 0);
	  if (ic == CODE_FOR_nothing)
	    continue;

	  PUT_MODE (mem, srcmode);

	  if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
	    float_extend_from_mem[mode][srcmode] = true;
	}
    }
}
/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  cfun->expr = (struct expr_status *) ggc_alloc (sizeof (struct expr_status));

  pending_chain = 0;
  pending_stack_adjust = 0;
  stack_pointer_delta = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Small sanity check that the queue is empty at the end of a function.  */

void
finish_expr_for_function ()
{
  if (pending_chain)
    abort ();
}
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
				  body, pending_chain);
  return pending_chain;
}

/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */
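/* A minimal usage sketch (illustrative only, not part of GCC; VAR and
   TARGET are hypothetical pseudos):  */
#if 0
  rtx var = gen_reg_rtx (SImode);
  rtx target = gen_reg_rtx (SImode);

  /* Queue "var = var + 1"; Q stands for VAR's pre-increment value.  */
  rtx q = enqueue_insn (var, gen_move_insn (var, plus_constant (var, 1)));

  /* Filter Q through protect_from_queue just before placing it in an
     insn; it yields VAR itself, or a safe copy of VAR's old value once
     the queued increment has been emitted.  */
  emit_move_insn (target, protect_from_queue (q, 0));

  /* Flush the queue; the queued increment is emitted here.  */
  emit_queue ();
#endif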
rtx
protect_from_queue (x, modify)
     rtx x;
     int modify;
{
  RTX_CODE code = GET_CODE (x);

#if 0 /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  rtx y = XEXP (x, 0);
	  rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));

	  if (QUEUED_INSN (y))
	    {
	      rtx temp = gen_reg_rtx (GET_MODE (x));

	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }

	  /* Copy the address into a pseudo, so that the returned value
	     remains correct across calls to emit_queue.  */
	  return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
	}

      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }

  /* If the increment has not happened, use the variable itself.  Copy it
     into a new pseudo so that the value remains correct across calls to
     emit_queue.  */
  if (QUEUED_INSN (x) == 0)
    return copy_to_reg (QUEUED_VAR (x));
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

int
queued_subexp_p (x)
     rtx x;
{
  enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
	      || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}

/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  rtx p;
  while ((p = pending_chain))
    {
      rtx body = QUEUED_BODY (p);

      switch (GET_CODE (body))
	{
	case INSN:
	case JUMP_INSN:
	case CALL_INSN:
	  QUEUED_INSN (p) = body;
	  break;

#ifdef ENABLE_CHECKING
	case SEQUENCE:
	  abort ();
	  break;
#endif

	default:
	  QUEUED_INSN (p) = emit_insn (body);
	  break;
	}

      pending_chain = QUEUED_NEXT (p);
    }
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */
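/* For example (illustrative sketch, not part of GCC), widening a SImode
   pseudo into a DImode pseudo with zero-extension:  */
#if 0
  rtx src = gen_reg_rtx (SImode);
  rtx dst = gen_reg_rtx (DImode);
  convert_move (dst, src, 1);	/* nonzero UNSIGNEDP => zero-extend */
#endif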
void
convert_move (to, from, unsignedp)
     rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
			      : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
	abort ();

      if (VECTOR_MODE_P (to_mode))
	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (to_real != from_real)
    abort ();
  if (to_real)
    {
      rtx value, insns;
      rtx libcall = (rtx) 0;

      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
	{
	  /* Try converting directly if the insn is supported.  */
	  if ((code = can_extend_p (to_mode, from_mode, 0))
	      != CODE_FOR_nothing)
	    {
	      emit_unop_insn (code, to, from, UNKNOWN);
	      return;
	    }
	}

#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctqfqf2
      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif

      /* Otherwise pick the libcall matching the (FROM_MODE, TO_MODE)
	 pair.  */
      switch (from_mode)
	{
	case SFmode:
	  switch (to_mode)
	    {
	    case DFmode: libcall = extendsfdf2_libfunc; break;
	    case XFmode: libcall = extendsfxf2_libfunc; break;
	    case TFmode: libcall = extendsftf2_libfunc; break;
	    default: break;
	    }
	  break;

	case DFmode:
	  switch (to_mode)
	    {
	    case SFmode: libcall = truncdfsf2_libfunc; break;
	    case XFmode: libcall = extenddfxf2_libfunc; break;
	    case TFmode: libcall = extenddftf2_libfunc; break;
	    default: break;
	    }
	  break;

	case XFmode:
	  switch (to_mode)
	    {
	    case SFmode: libcall = truncxfsf2_libfunc; break;
	    case DFmode: libcall = truncxfdf2_libfunc; break;
	    default: break;
	    }
	  break;

	case TFmode:
	  switch (to_mode)
	    {
	    case SFmode: libcall = trunctfsf2_libfunc; break;
	    case DFmode: libcall = trunctfdf2_libfunc; break;
	    default: break;
	    }
	  break;

	default:
	  break;
	}

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
				       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
								    from));
      return;
    }
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }
  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  if (to_mode == PQImode)
    {
      if (from_mode != QImode)
	from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_truncqipqi2
      if (HAVE_truncqipqi2)
	{
	  emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncqipqi2 */
      abort ();
    }

  if (from_mode == PQImode)
    {
      if (to_mode != QImode)
	{
	  from = convert_to_mode (QImode, from, unsignedp);
	  from_mode = QImode;
	}
      else
	{
#ifdef HAVE_extendpqiqi2
	  if (HAVE_extendpqiqi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpqiqi2 */
	  abort ();
	}
    }

  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi2
	  if (! unsignedp && HAVE_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi2 */
#ifdef HAVE_zero_extendpsisi2
	  if (unsignedp && HAVE_zero_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_zero_extendpsisi2 */
	  abort ();
	}
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
	from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
	{
	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
	{
	  from = convert_to_mode (DImode, from, unsignedp);
	  from_mode = DImode;
	}
      else
	{
#ifdef HAVE_extendpdidi2
	  if (HAVE_extendpdidi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpdidi2 */
	  abort ();
	}
    }
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  if (flag_force_mem)
	    from = force_not_mem (from);

	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;
	  rtx tmp;
	  tree shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
				      - GET_MODE_BITSIZE (from_mode), 0);
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }
  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
	{
	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
	{
	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
	{
	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */
rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */
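/* For instance (illustrative sketch, not part of GCC), a CONST_INT
   carries VOIDmode, so OLDMODE tells convert_modes how to interpret
   its bits:  */
#if 0
  /* Read 0xff as a QImode value and widen it to SImode; because
     UNSIGNEDP is 1 the result is 255 rather than -1.  */
  rtx x = convert_modes (SImode, QImode, GEN_INT (0xff), 1);
#endif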
rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ??? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return gen_int_mode (val, mode);
	}

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
/* This macro is used to determine what the largest unit size that
   move_by_pieces can use is.  */

/* MOVE_MAX_PIECES is the number of bytes at a time which we can
   move efficiently, as opposed to MOVE_MAX which is the maximum
   number of bytes we can move with a single instruction.  */

#ifndef MOVE_MAX_PIECES
#define MOVE_MAX_PIECES   MOVE_MAX
#endif

/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))

/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).  The caller must pass FROM
   and TO through protect_from_queue before calling.

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum alignment we can assume.  */
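/* Typical call pattern (mirroring emit_block_move below): only copy
   by pieces when the length is a known constant that the target
   considers cheap enough.  */
#if 0
  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align);
#endif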
void
move_by_pieces (to, from, len, align)
     rtx to, from;
     unsigned HOST_WIDE_INT len;
     unsigned int align;
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();
}

/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */
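/* Worked example (hypothetical 32-bit target with MOVE_MAX == 4): for
   L == 10 and ALIGN == 32, the loop below counts 10/4 = 2 SImode moves
   (leaving 2 bytes), then one HImode move, so the result is 3.  */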
static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (l, align)
     unsigned HOST_WIDE_INT l;
     unsigned int align;
{
  unsigned HOST_WIDE_INT n_insns = 0;
  unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  if (l)
    abort ();
  return n_insns;
}

/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PARAMS ((rtx, ...));
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->to)
	{
	  if (data->autinc_to)
	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
					     data->offset);
	  else
	    to1 = adjust_address (data->to, mode, data->offset);
	}

      if (data->autinc_from)
	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
					   data->offset);
      else
	from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
	emit_insn ((*genfun) (to1, from1));
      else
	{
#ifdef PUSH_ROUNDING
	  emit_single_push_insn (mode, from1, NULL);
#else
	  abort ();
#endif
	}

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}

/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */
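/* Example call (illustrative sketch, not part of GCC; X, Y and LEN are
   hypothetical):  */
#if 0
  rtx ret = emit_block_move (x, y, GEN_INT (len), BLOCK_OP_NORMAL);
#endif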
rtx
emit_block_move (x, y, size, method)
     rtx x, y, size;
     enum block_op_methods method;
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
	 to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      abort ();
    }

  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  if (GET_MODE (x) != BLKmode)
    abort ();
  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (GET_CODE (size) == CONST_INT)
    {
      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, size);
      set_mem_size (y, size);
    }

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align);
  else if (emit_block_move_via_movstr (x, y, size, align))
    ;
  else if (may_use_call)
    retval = emit_block_move_via_libcall (x, y, size);
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}

/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm ()
{
  if (PUSH_ARGS)
    return true;

  /* Check to see whether memcpy takes all register arguments.  */
  static enum {
    takes_regs_uninit, takes_regs_no, takes_regs_yes
  } takes_regs = takes_regs_uninit;

  switch (takes_regs)
    {
    case takes_regs_uninit:
      {
	CUMULATIVE_ARGS args_so_far;
	tree fn, arg;

	fn = emit_block_move_libcall_fn (false);
	INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0);

	arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
	for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
	  {
	    enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
	    rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
	    if (!tmp || !REG_P (tmp))
	      goto fail_takes_regs;
#ifdef FUNCTION_ARG_PARTIAL_NREGS
	    if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
					    NULL_TREE, 1))
	      goto fail_takes_regs;
#endif
	    FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
	  }
      }
      takes_regs = takes_regs_yes;
      /* FALLTHRU */

    case takes_regs_yes:
      return true;

    fail_takes_regs:
      takes_regs = takes_regs_no;
      /* FALLTHRU */
    case takes_regs_no:
      return false;

    default:
      abort ();
    }
}
/* A subroutine of emit_block_move.  Expand a movstr pattern;
   return true if successful.  */

static bool
emit_block_move_via_movstr (x, y, size, align)
     rtx x, y, size;
     unsigned int align;
{
  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  enum machine_mode mode;

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = movstr_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
	     here because if SIZE is less than the mode mask, as it is
	     returned by the macro, it will definitely be less than the
	     actual mode mask.  */
	  && ((GET_CODE (size) == CONST_INT
	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
		   <= (GET_MODE_MASK (mode) >> 1)))
	      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
	      || (*pred) (x, BLKmode))
	  && ((pred = insn_data[(int) code].operand[1].predicate) == 0
	      || (*pred) (y, BLKmode))
	  && ((pred = insn_data[(int) code].operand[3].predicate) == 0
	      || (*pred) (opalign, VOIDmode)))
	{
	  rtx op2;
	  rtx last = get_last_insn ();
	  rtx pat;

	  op2 = convert_to_mode (mode, size, 1);
	  pred = insn_data[(int) code].operand[2].predicate;
	  if (pred != 0 && ! (*pred) (op2, mode))
	    op2 = copy_to_mode_reg (mode, op2);

	  /* ??? When called via emit_block_move_for_call, it'd be
	     nice if there were some way to inform the backend, so
	     that it doesn't fail the expansion because it thinks
	     emitting the libcall would be more efficient.  */

	  pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	  if (pat)
	    {
	      emit_insn (pat);
	      volatile_ok = 0;
	      return true;
	    }
	  else
	    delete_insns_since (last);
	}
    }

  volatile_ok = 0;
  return false;
}
/* A subroutine of emit_block_move.  Expand a call to memcpy or bcopy.
   Return the return value from memcpy, 0 otherwise.  */

static rtx
emit_block_move_via_libcall (dst, src, size)
     rtx dst, src, size;
{
  tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* DST, SRC, or SIZE may have been passed through protect_from_queue.

     It is unsafe to save the value generated by protect_from_queue
     and reuse it later.  Consider what happens if emit_queue is
     called before the return value from protect_from_queue is used.

     Expansion of the CALL_EXPR below will call emit_queue before
     we are finished emitting RTL for argument setup.  So if we are
     not careful we could get the wrong value for an argument.

     To avoid this problem we go ahead and emit code to copy X, Y &
     SIZE into new pseudos.  We can then place those new pseudos
     into an RTL_EXPR and use them later, even after a call to
     emit_queue.

     Note this is not strictly needed for library calls since they
     do not call emit_queue before loading their arguments.  However,
     we may need to have library calls call emit_queue in the future
     since failing to do so could cause problems for targets which
     define SMALL_REGISTER_CLASSES and pass arguments in registers.  */

  dst = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  src = copy_to_mode_reg (Pmode, XEXP (src, 0));

  if (TARGET_MEM_FUNCTIONS)
    size_mode = TYPE_MODE (sizetype);
  else
    size_mode = TYPE_MODE (unsigned_type_node);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.

     For convenience, we generate the call to bcopy this way as well.  */

  dst_tree = make_tree (ptr_type_node, dst);
  src_tree = make_tree (ptr_type_node, src);
  if (TARGET_MEM_FUNCTIONS)
    size_tree = make_tree (sizetype, size);
  else
    size_tree = make_tree (unsigned_type_node, size);

  fn = emit_block_move_libcall_fn (true);
  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
  if (TARGET_MEM_FUNCTIONS)
    {
      arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
      arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
    }
  else
    {
      arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
      arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
    }

  /* Now we have to build up the CALL_EXPR itself.  */
  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
  call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
		     call_expr, arg_list, NULL_TREE);
  TREE_SIDE_EFFECTS (call_expr) = 1;

  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);

  /* If we are initializing a readonly value, show the above call
     clobbered it.  Otherwise, a load from it may erroneously be
     hoisted from a loop.  */
  if (RTX_UNCHANGING_P (dst))
    emit_insn (gen_rtx_CLOBBER (VOIDmode, dst));

  return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
}
/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_move_fn;

static tree
emit_block_move_libcall_fn (for_call)
     int for_call;
{
  static bool emitted_extern;
  tree fn = block_move_fn, args;

  if (!fn)
    {
      if (TARGET_MEM_FUNCTIONS)
	{
	  fn = get_identifier ("memcpy");
	  args = build_function_type_list (ptr_type_node, ptr_type_node,
					   const_ptr_type_node, sizetype,
					   NULL_TREE);
	}
      else
	{
	  fn = get_identifier ("bcopy");
	  args = build_function_type_list (void_type_node, const_ptr_type_node,
					   ptr_type_node, unsigned_type_node,
					   NULL_TREE);
	}

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;

      block_move_fn = fn;
    }

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (fn, NULL);
      assemble_external (fn);
    }

  return fn;
}

/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */
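/* The emitted RTL behaves like this C sketch (one byte per iteration;
   the comparison is unsigned):

	iter = 0;
	goto cmp;
     top:
	x[iter] = y[iter];
	iter += 1;
     cmp:
	if (iter < size)
	  goto top;  */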
static void
emit_block_move_via_loop (x, y, size, align)
     rtx x, y, size;
     unsigned int align ATTRIBUTE_UNUSED;
{
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_note (NULL, NOTE_INSN_LOOP_BEG);

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (Pmode, iter_mode, iter, true);
  x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
  y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
			     true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_note (NULL, NOTE_INSN_LOOP_CONT);
  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
			   true, top_label);

  emit_note (NULL, NOTE_INSN_LOOP_END);
}
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  SIZE indicates the number
   of bytes in the object X.  */

void
move_block_from_reg (regno, x, nregs, size)
     int regno;
     rtx x;
     int nregs;
     int size;
{
  int i;
#ifdef HAVE_store_multiple
  rtx pat;
  rtx last;
#endif
  enum machine_mode mode;

  if (nregs == 0)
    return;

  /* If SIZE is that of a mode no bigger than a word, just use that
     mode's store operation.  */
  if (size <= UNITS_PER_WORD
      && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
    {
      emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno));
      return;
    }

  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
     to the left before storing to memory.  Note that the previous test
     doesn't handle all cases (e.g. SIZE == 3).  */
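/* Worked example (illustrative): with UNITS_PER_WORD == 4 and
   SIZE == 3 on a big-endian target, the shift below is
   (4 - 3) * BITS_PER_UNIT == 8 bits, moving the three meaningful
   bytes to the low-addressed end of the word in memory.  */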
  if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
    {
      rtx tem = operand_subword (x, 0, 1, BLKmode);
      rtx shift;

      if (tem == 0)
	abort ();

      shift = expand_shift (LSHIFT_EXPR, word_mode,
			    gen_rtx_REG (word_mode, regno),
			    build_int_2 ((UNITS_PER_WORD - size)
					 * BITS_PER_UNIT, 0), NULL_RTX, 0);
      emit_move_insn (tem, shift);
      return;
    }

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      last = get_last_insn ();
      pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
				GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
	abort ();

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}

/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

rtx
gen_group_rtx (orig)
     rtx orig;
{
  int i, length;
  rtx *tmps;

  if (GET_CODE (orig) != PARALLEL)
    abort ();

  length = XVECLEN (orig, 0);
  tmps = (rtx *) alloca (sizeof (rtx) * length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  if (i)
    tmps[0] = 0;

  for (; i < length; i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
    }

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}

/* Emit code to move a block SRC to a block DST, where DST is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block SRC in bytes, or -1 if not known.  */
/* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
   the balance will be in what would be the low-order memory addresses, i.e.
   left justified for big endian, right justified for little endian.  This
   happens to be true for the targets currently using this support.  If this
   ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
   would be needed.  */
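/* For reference (illustrative sketch, not part of GCC), such a group
   might look like

     (parallel [(expr_list (reg:DI 100) (const_int 0))
		(expr_list (reg:DI 101) (const_int 8))])

   pairing each register with its byte offset within the block; the
   loop below reads these back via XEXP (XVECEXP (dst, 0, i), 0)
   and XEXP (XVECEXP (dst, 0, i), 1).  */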
2259 emit_group_load (dst, orig_src, ssize)
2266 if (GET_CODE (dst) != PARALLEL)
2269 /* Check for a NULL entry, used to indicate that the parameter goes
2270 both on the stack and in registers. */
2271 if (XEXP (XVECEXP (dst, 0, 0), 0))
2276 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
2278 /* Process the pieces. */
2279 for (i = start; i < XVECLEN (dst, 0); i++)
2281 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
2282 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
2283 unsigned int bytelen = GET_MODE_SIZE (mode);
2286 /* Handle trailing fragments that run over the size of the struct. */
2287 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2289 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2290 bytelen = ssize - bytepos;
2295 /* If we won't be loading directly from memory, protect the real source
2296 from strange tricks we might play; but make sure that the source can
2297 be loaded directly into the destination. */
2299 if (GET_CODE (orig_src) != MEM
2300 && (!CONSTANT_P (orig_src)
2301 || (GET_MODE (orig_src) != mode
2302 && GET_MODE (orig_src) != VOIDmode)))
2304 if (GET_MODE (orig_src) == VOIDmode)
2305 src = gen_reg_rtx (mode);
2307 src = gen_reg_rtx (GET_MODE (orig_src));
2309 emit_move_insn (src, orig_src);
2312 /* Optimize the access just a bit. */
2313 if (GET_CODE (src) == MEM
2314 && MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode)
2315 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2316 && bytelen == GET_MODE_SIZE (mode))
2318 tmps[i] = gen_reg_rtx (mode);
2319 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
2321 else if (GET_CODE (src) == CONCAT)
2323 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
2324 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
2326 if ((bytepos == 0 && bytelen == slen0)
2327 || (bytepos != 0 && bytepos + bytelen <= slen))
2329 /* The following assumes that the concatenated objects all
2330 have the same size. In this case, a simple calculation
2331 can be used to determine the object and the bit field
2333 tmps[i] = XEXP (src, bytepos / slen0);
2334 if (! CONSTANT_P (tmps[i])
2335 && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
2336 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
2337 (bytepos % slen0) * BITS_PER_UNIT,
2338 1, NULL_RTX, mode, mode, ssize);
2340 else if (bytepos == 0)
2342 rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
2343 emit_move_insn (mem, src);
2344 tmps[i] = adjust_address (mem, mode, 0);
2349 else if (CONSTANT_P (src)
2350 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2353 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2354 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2357 if (BYTES_BIG_ENDIAN && shift)
2358 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2359 tmps[i], 0, OPTAB_WIDEN);
2364 /* Copy the extracted pieces into the proper (probable) hard regs. */
2365 for (i = start; i < XVECLEN (dst, 0); i++)
2366 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
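
/* Worked example of the trailing-fragment handling above: with
   SSIZE == 10 and 8-byte pieces, the second piece has bytepos == 8 and
   bytelen == 8, which overruns the struct, so bytelen becomes
   10 - 8 == 2 and shift becomes (8 - 2) * 8 == 48 bits.  On a
   big-endian target the two useful bytes are then shifted left by 48
   so that they sit at the high-order (low-addressed) end of the
   register, matching the justification assumed by the ??? comment
   before this function.  */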
/* Emit code to move a block SRC to block DST, where SRC and DST are
   non-consecutive groups of registers, each represented by a PARALLEL.  */

void
emit_group_move (dst, src)
     rtx dst, src;
{
  int i;

  if (GET_CODE (src) != PARALLEL
      || GET_CODE (dst) != PARALLEL
      || XVECLEN (src, 0) != XVECLEN (dst, 0))
    abort ();

  /* Skip first entry if NULL.  */
  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
                    XEXP (XVECEXP (src, 0, i), 0));
}
/* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block DST, or -1 if not known.  */

void
emit_group_store (orig_dst, src, ssize)
     rtx orig_dst, src;
     int ssize;
{
  rtx *tmps, dst;
  int start, i;

  if (GET_CODE (src) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      tmps[i] = gen_reg_rtx (GET_MODE (reg));
      emit_move_insn (tmps[i], reg);
    }
  emit_queue ();

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
         a return statement.  In that case, the dst and src are the same,
         so no action is necessary.  */
      if (rtx_equal_p (dst, src))
        return;

      /* It is unclear if we can ever reach here, but we may as well handle
         it.  Allocate a temporary, and split this into a store/load to/from
         the temporary.  */

      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
      emit_group_store (temp, src, ssize);
      emit_group_load (dst, temp, ssize);
      return;
    }
  else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
    {
      dst = gen_reg_rtx (GET_MODE (orig_dst));
      /* Make life a bit easier for combine.  */
      emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
    }

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);
      rtx dest = dst;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        {
          if (BYTES_BIG_ENDIAN)
            {
              int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
              expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
                            tmps[i], 0, OPTAB_WIDEN);
            }
          bytelen = ssize - bytepos;
        }

      if (GET_CODE (dst) == CONCAT)
        {
          if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
            dest = XEXP (dst, 0);
          else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
            {
              bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
              dest = XEXP (dst, 1);
            }
          else
            abort ();
        }

      /* Optimize the access just a bit.  */
      if (GET_CODE (dest) == MEM
          && MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode)
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
          && bytelen == GET_MODE_SIZE (mode))
        emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
      else
        store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
                         mode, tmps[i], ssize);
    }

  emit_queue ();

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (GET_CODE (dst) == REG)
    emit_move_insn (orig_dst, dst);
}
/* Generate code to copy a BLKmode object of TYPE out of a
   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
   is null, a stack temporary is created.  TGTBLK is returned.

   The primary purpose of this routine is to handle functions
   that return BLKmode structures in registers.  Some machines
   (the PA for example) want to return all small structures
   in registers regardless of the structure's alignment.  */

rtx
copy_blkmode_from_reg (tgtblk, srcreg, type)
     rtx tgtblk;
     rtx srcreg;
     tree type;
{
  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
  rtx src = NULL, dst = NULL;
  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
  unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;

  if (tgtblk == 0)
    {
      tgtblk = assign_temp (build_qualified_type (type,
                                                  (TYPE_QUALS (type)
                                                   | TYPE_QUAL_CONST)),
                            0, 1, 1);
      preserve_temp_slots (tgtblk);
    }

  /* This code assumes srcreg is at least a full word.  If it isn't, copy it
     into a new pseudo which is a full word.  */

  if (GET_MODE (srcreg) != BLKmode
      && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
    srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));

  /* Structures whose size is not a multiple of a word are aligned
     to the least significant byte (to the right).  On a BYTES_BIG_ENDIAN
     machine, this means we must skip the empty high order bytes when
     calculating the bit offset.  */
  if (BYTES_BIG_ENDIAN
      && bytes % UNITS_PER_WORD)
    big_endian_correction
      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));

  /* Copy the structure BITSIZE bits at a time.

     We could probably emit more efficient code for machines which do not use
     strict alignment, but it doesn't seem worth the effort at the current
     time.  */
  for (bitpos = 0, xbitpos = big_endian_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
    {
      /* We need a new source operand each time xbitpos is on a
         word boundary and when xbitpos == big_endian_correction
         (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0
          || xbitpos == big_endian_correction)
        src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
                                     GET_MODE (srcreg));

      /* We need a new destination operand each time bitpos is on
         a word boundary.  */
      if (bitpos % BITS_PER_WORD == 0)
        dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);

      /* Use xbitpos for the source extraction (right justified) and
         bitpos for the destination store (left justified).  */
      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
                       extract_bit_field (src, bitsize,
                                          xbitpos % BITS_PER_WORD, 1,
                                          NULL_RTX, word_mode, word_mode,
                                          BITS_PER_WORD),
                       BITS_PER_WORD);
    }

  return tgtblk;
}
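
/* Worked example of the correction above: a 6-byte struct on a 32-bit
   big-endian target (UNITS_PER_WORD == 4) gives
   big_endian_correction == 32 - (6 % 4) * 8 == 16.  With bitsize == 16
   the loop then reads source bits starting at xbitpos == 16, skipping
   the two empty high-order bytes of the first source word, while
   bitpos fills the destination from bit 0.  */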
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (call_fusage, reg)
     rtx *call_fusage, reg;
{
  if (GET_CODE (reg) != REG
      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    abort ();

  *call_fusage
    = gen_rtx_EXPR_LIST (VOIDmode,
                         gen_rtx_USE (VOIDmode, reg), *call_fusage);
}

/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (call_fusage, regno, nregs)
     rtx *call_fusage;
     int regno;
     int nregs;
{
  int i;

  if (regno + nregs > FIRST_PSEUDO_REGISTER)
    abort ();

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, regno_reg_rtx[regno + i]);
}

/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (call_fusage, regs)
     rtx *call_fusage;
     rtx regs;
{
  int i;

  for (i = 0; i < XVECLEN (regs, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
         registers.  This can also be a MEM for targets that pass values
         partially on the stack and partially in registers.  */
      if (reg != 0 && GET_CODE (reg) == REG)
        use_reg (call_fusage, reg);
    }
}
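
/* Illustrative sketch (not compiled; register numbers made up): after
   these calls, *CALL_FUSAGE is a chain such as

     (expr_list (use (reg:SI 4))
        (expr_list (use (reg:SI 5)) ...))

   which is attached to the call insn so that data-flow analysis sees
   the argument registers as live at the call.  */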
/* Determine whether the LEN bytes generated by CONSTFUN can be
   stored to memory using several move instructions.  CONSTFUNDATA is
   a pointer which will be passed as an argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  Return nonzero if a
   call to store_by_pieces should succeed.  */

int
can_store_by_pieces (len, constfun, constfundata, align)
     unsigned HOST_WIDE_INT len;
     rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
     PTR constfundata;
     unsigned int align;
{
  unsigned HOST_WIDE_INT max_size, l;
  HOST_WIDE_INT offset = 0;
  enum machine_mode mode, tmode;
  enum insn_code icode;
  int reverse;
  rtx cst;

  if (! STORE_BY_PIECES_P (len, align))
    return 0;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* We would first store what we can in the largest integer mode, then go to
     successively smaller modes.  */
  for (reverse = 0;
       reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
       reverse++)
    {
      l = len;
      mode = VOIDmode;
      max_size = STORE_MAX_PIECES + 1;
      while (max_size > 1)
        {
          for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
               tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
            if (GET_MODE_SIZE (tmode) < max_size)
              mode = tmode;

          if (mode == VOIDmode)
            break;

          icode = mov_optab->handlers[(int) mode].insn_code;
          if (icode != CODE_FOR_nothing
              && align >= GET_MODE_ALIGNMENT (mode))
            {
              unsigned int size = GET_MODE_SIZE (mode);

              while (l >= size)
                {
                  if (reverse)
                    offset -= size;
                  cst = (*constfun) (constfundata, offset, mode);
                  if (!LEGITIMATE_CONSTANT_P (cst))
                    return 0;
                  if (!reverse)
                    offset += size;
                  l -= size;
                }
            }

          max_size = GET_MODE_SIZE (mode);
        }

      /* The code above should have handled everything.  */
      if (l != 0)
        abort ();
    }

  return 1;
}
/* Generate several move instructions to store LEN bytes generated by
   CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
   pointer which will be passed as an argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  */

void
store_by_pieces (to, len, constfun, constfundata, align)
     rtx to;
     unsigned HOST_WIDE_INT len;
     rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
     PTR constfundata;
     unsigned int align;
{
  struct store_by_pieces data;

  if (! STORE_BY_PIECES_P (len, align))
    abort ();
  to = protect_from_queue (to, 1);
  data.constfun = constfun;
  data.constfundata = constfundata;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
}
/* Generate several move instructions to clear LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  The caller must pass TO through protect_from_queue
   before calling.  ALIGN is maximum alignment we can assume.  */

static void
clear_by_pieces (to, len, align)
     rtx to;
     unsigned HOST_WIDE_INT len;
     unsigned int align;
{
  struct store_by_pieces data;

  data.constfun = clear_by_pieces_1;
  data.constfundata = NULL;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
}

/* Callback routine for clear_by_pieces.
   Return const0_rtx unconditionally.  */

static rtx
clear_by_pieces_1 (data, offset, mode)
     PTR data ATTRIBUTE_UNUSED;
     HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  return const0_rtx;
}
/* Subroutine of clear_by_pieces and store_by_pieces.
   Generate several move instructions to store LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  The caller must pass TO through protect_from_queue
   before calling.  ALIGN is maximum alignment we can assume.  */

static void
store_by_pieces_1 (data, align)
     struct store_by_pieces *data;
     unsigned int align;
{
  rtx to_addr = XEXP (data->to, 0);
  unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data->offset = 0;
  data->to_addr = to_addr;
  data->autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);

  data->explicit_inc_to = 0;
  data->reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data->reverse)
    data->offset = data->len;

  /* If storing requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!data->autinc_to
      && move_by_pieces_ninsns (data->len, align) > 2)
    {
      /* Determine the main mode we'll be using.  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
        {
          data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
          data->autinc_to = 1;
          data->explicit_inc_to = -1;
        }

      if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
          && ! data->autinc_to)
        {
          data->to_addr = copy_addr_to_reg (to_addr);
          data->autinc_to = 1;
          data->explicit_inc_to = 1;
        }

      if ( !data->autinc_to && CONSTANT_P (to_addr))
        data->to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First store what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        store_by_pieces_2 (GEN_FCN (icode), mode, data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data->len != 0)
    abort ();
}
/* Subroutine of store_by_pieces_1.  Store as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
store_by_pieces_2 (genfun, mode, data)
     rtx (*genfun) PARAMS ((rtx, ...));
     enum machine_mode mode;
     struct store_by_pieces *data;
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1, cst;

  while (data->len >= size)
    {
      if (data->reverse)
        data->offset -= size;

      if (data->autinc_to)
        to1 = adjust_automodify_address (data->to, mode, data->to_addr,
                                         data->offset);
      else
        to1 = adjust_address (data->to, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  GEN_INT (-(HOST_WIDE_INT) size)));

      cst = (*data->constfun) (data->constfundata, data->offset, mode);
      emit_insn ((*genfun) (to1, cst));

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));

      if (! data->reverse)
        data->offset += size;

      data->len -= size;
    }
}
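
/* Worked example of the piece selection above, assuming full alignment
   and 8-bit units: with data->len == 11 and an 8-byte widest piece,
   store_by_pieces_1 first picks DImode and this routine stores one
   8-byte piece at offset 0; SImode is then skipped because only 3
   bytes remain, HImode stores 2 bytes at offset 8, and QImode stores
   the final byte at offset 10, leaving data->len == 0.  */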
/* Write zeros through the storage of OBJECT.  If OBJECT has BLKmode, SIZE is
   its length in bytes.  */

rtx
clear_storage (object, size)
     rtx object;
     rtx size;
{
  rtx retval = 0;
  unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
                        : GET_MODE_ALIGNMENT (GET_MODE (object)));

  /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
     just move a zero.  Otherwise, do this a piece at a time.  */
  if (GET_MODE (object) != BLKmode
      && GET_CODE (size) == CONST_INT
      && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
    emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
  else
    {
      object = protect_from_queue (object, 1);
      size = protect_from_queue (size, 0);

      if (GET_CODE (size) == CONST_INT
          && CLEAR_BY_PIECES_P (INTVAL (size), align))
        clear_by_pieces (object, INTVAL (size), align);
      else if (clear_storage_via_clrstr (object, size, align))
        ;
      else
        retval = clear_storage_via_libcall (object, size);
    }

  return retval;
}
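
/* Usage sketch (illustrative only, compiled out): clearing a 64-byte
   BLKmode temporary.  */
#if 0
static void
clear_storage_example ()
{
  rtx dest = assign_stack_temp (BLKmode, 64, 0);
  rtx ret = clear_storage (dest, GEN_INT (64));
  /* Depending on size, alignment and target support this expands to
     store-by-pieces, a clrstr pattern, or a libcall; RET is memset's
     return value only when the libcall path is taken.  */
}
#endif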
/* A subroutine of clear_storage.  Expand a clrstr pattern;
   return true if successful.  */

static bool
clear_storage_via_clrstr (object, size, align)
     rtx object, size;
     unsigned int align;
{
  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  enum machine_mode mode;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = clrstr_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
          /* We don't need MODE to be narrower than
             BITS_PER_HOST_WIDE_INT here because if SIZE is less than
             the mode mask, as it is returned by the macro, it will
             definitely be less than the actual mode mask.  */
          && ((GET_CODE (size) == CONST_INT
               && ((unsigned HOST_WIDE_INT) INTVAL (size)
                   <= (GET_MODE_MASK (mode) >> 1)))
              || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
          && ((pred = insn_data[(int) code].operand[0].predicate) == 0
              || (*pred) (object, BLKmode))
          && ((pred = insn_data[(int) code].operand[2].predicate) == 0
              || (*pred) (opalign, VOIDmode)))
        {
          rtx op1;
          rtx last = get_last_insn ();
          rtx pat;

          op1 = convert_to_mode (mode, size, 1);
          pred = insn_data[(int) code].operand[1].predicate;
          if (pred != 0 && ! (*pred) (op1, mode))
            op1 = copy_to_mode_reg (mode, op1);

          pat = GEN_FCN ((int) code) (object, op1, opalign);
          if (pat)
            {
              emit_insn (pat);
              return true;
            }
          else
            delete_insns_since (last);
        }
    }

  return false;
}
/* A subroutine of clear_storage.  Expand a call to memset or bzero.
   Return the return value of memset, 0 otherwise.  */

static rtx
clear_storage_via_libcall (object, size)
     rtx object, size;
{
  tree call_expr, arg_list, fn, object_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* OBJECT or SIZE may have been passed through protect_from_queue.

     It is unsafe to save the value generated by protect_from_queue
     and reuse it later.  Consider what happens if emit_queue is
     called before the return value from protect_from_queue is used.

     Expansion of the CALL_EXPR below will call emit_queue before
     we are finished emitting RTL for argument setup.  So if we are
     not careful we could get the wrong value for an argument.

     To avoid this problem we go ahead and emit code to copy OBJECT
     and SIZE into new pseudos.  We can then place those new pseudos
     into an RTL_EXPR and use them later, even after a call to
     emit_queue.

     Note this is not strictly needed for library calls since they
     do not call emit_queue before loading their arguments.  However,
     we may need to have library calls call emit_queue in the future
     since failing to do so could cause problems for targets which
     define SMALL_REGISTER_CLASSES and pass arguments in registers.  */

  object = copy_to_mode_reg (Pmode, XEXP (object, 0));

  if (TARGET_MEM_FUNCTIONS)
    size_mode = TYPE_MODE (sizetype);
  else
    size_mode = TYPE_MODE (unsigned_type_node);
  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memset in this context.  This could be a user call to memset and
     the user may wish to examine the return value from memset.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.

     For convenience, we generate the call to bzero this way as well.  */

  object_tree = make_tree (ptr_type_node, object);
  if (TARGET_MEM_FUNCTIONS)
    size_tree = make_tree (sizetype, size);
  else
    size_tree = make_tree (unsigned_type_node, size);

  fn = clear_storage_libcall_fn (true);
  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
  if (TARGET_MEM_FUNCTIONS)
    arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
  arg_list = tree_cons (NULL_TREE, object_tree, arg_list);

  /* Now we have to build up the CALL_EXPR itself.  */
  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
  call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
                     call_expr, arg_list, NULL_TREE);
  TREE_SIDE_EFFECTS (call_expr) = 1;

  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);

  /* If we are initializing a readonly value, show the above call
     clobbered it.  Otherwise, a load from it may erroneously be
     hoisted from a loop.  */
  if (RTX_UNCHANGING_P (object))
    emit_insn (gen_rtx_CLOBBER (VOIDmode, object));

  return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
}
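
/* Note on the list built above: tree_cons prepends, so arg_list is
   consed in reverse and the final argument order is (object, 0, size).
   The emitted call is therefore equivalent to memset (object, 0, size),
   or to bzero (object, size) when !TARGET_MEM_FUNCTIONS.  */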
/* A subroutine of clear_storage_via_libcall.  Create the tree node
   for the function we use for block clears.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_clear_fn;

static tree
clear_storage_libcall_fn (for_call)
     int for_call;
{
  static bool emitted_extern;
  tree fn = block_clear_fn, args;

  if (!fn)
    {
      if (TARGET_MEM_FUNCTIONS)
        {
          fn = get_identifier ("memset");
          args = build_function_type_list (ptr_type_node, ptr_type_node,
                                           integer_type_node, sizetype,
                                           NULL_TREE);
        }
      else
        {
          fn = get_identifier ("bzero");
          args = build_function_type_list (void_type_node, ptr_type_node,
                                           unsigned_type_node, NULL_TREE);
        }

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;

      block_clear_fn = fn;
    }

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (fn, NULL);
      assemble_external (fn);
    }

  return fn;
}
/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx
emit_move_insn (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);
  rtx y_cst = NULL_RTX;
  rtx last_insn;

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);

  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
    abort ();

  /* Never force constant_p_rtx to memory.  */
  if (GET_CODE (y) == CONSTANT_P_RTX)
    ;
  else if (CONSTANT_P (y))
    {
      if (optimize
          && SCALAR_FLOAT_MODE_P (GET_MODE (x))
          && (last_insn = compress_float_constant (x, y)))
        return last_insn;

      y_cst = y;
      if (!LEGITIMATE_CONSTANT_P (y))
        {
          y = force_const_mem (mode, y);

          /* If the target's cannot_force_const_mem prevented the spill,
             assume that the target's move expanders will also take care
             of the non-legitimate constant.  */
          if (!y)
            y = y_cst;
        }
    }

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (GET_CODE (x) == MEM
      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
           && ! push_operand (x, GET_MODE (x)))
          || (flag_force_addr
              && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
    x = validize_mem (x);

  if (GET_CODE (y) == MEM
      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
          || (flag_force_addr
              && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
    y = validize_mem (y);

  if (mode == BLKmode)
    abort ();

  last_insn = emit_move_insn_1 (x, y);

  if (y_cst && GET_CODE (x) == REG)
    set_unique_reg_note (last_insn, REG_EQUAL, y_cst);

  return last_insn;
}
/* Low level part of emit_move_insn.
   Called just like emit_move_insn, but assumes X and Y
   are basically valid.  */

rtx
emit_move_insn_1 (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);
  enum machine_mode submode;
  enum mode_class class = GET_MODE_CLASS (mode);

  if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
    abort ();

  if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
    return
      emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));

  /* Expand complex moves by moving real part and imag part, if possible.  */
  else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
           && BLKmode != (submode = GET_MODE_INNER (mode))
           && (mov_optab->handlers[(int) submode].insn_code
               != CODE_FOR_nothing))
    {
      /* Don't split destination if it is a stack push.  */
      int stack = push_operand (x, GET_MODE (x));

#ifdef PUSH_ROUNDING
      /* In case we output to the stack, but the size is smaller than the
         machine can push exactly, we need to use move instructions.  */
      if (stack
          && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
              != GET_MODE_SIZE (submode)))
        {
          rtx temp;
          HOST_WIDE_INT offset1, offset2;

          /* Do not use anti_adjust_stack, since we don't want to update
             stack_pointer_delta.  */
          temp = expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
                               sub_optab,
#else
                               add_optab,
#endif
                               stack_pointer_rtx,
                               GEN_INT
                                 (PUSH_ROUNDING
                                  (GET_MODE_SIZE (GET_MODE (x)))),
                               stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);

          if (temp != stack_pointer_rtx)
            emit_move_insn (stack_pointer_rtx, temp);

#ifdef STACK_GROWS_DOWNWARD
          offset1 = 0;
          offset2 = GET_MODE_SIZE (submode);
#else
          offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
          offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
                     + GET_MODE_SIZE (submode));
#endif

          emit_move_insn (change_address (x, submode,
                                          gen_rtx_PLUS (Pmode,
                                                        stack_pointer_rtx,
                                                        GEN_INT (offset1))),
                          gen_realpart (submode, y));
          emit_move_insn (change_address (x, submode,
                                          gen_rtx_PLUS (Pmode,
                                                        stack_pointer_rtx,
                                                        GEN_INT (offset2))),
                          gen_imagpart (submode, y));
        }
      else
#endif
      /* If this is a stack, push the highpart first, so it
         will be in the argument order.

         In that case, change_address is used only to convert
         the mode, not to change the address.  */
      if (stack)
        {
          /* Note that the real part always precedes the imag part in memory
             regardless of machine's endianness.  */
#ifdef STACK_GROWS_DOWNWARD
          emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
                     (gen_rtx_MEM (submode, XEXP (x, 0)),
                      gen_imagpart (submode, y)));
          emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
                     (gen_rtx_MEM (submode, XEXP (x, 0)),
                      gen_realpart (submode, y)));
#else
          emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
                     (gen_rtx_MEM (submode, XEXP (x, 0)),
                      gen_realpart (submode, y)));
          emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
                     (gen_rtx_MEM (submode, XEXP (x, 0)),
                      gen_imagpart (submode, y)));
#endif
        }
      else
        {
          rtx realpart_x, realpart_y;
          rtx imagpart_x, imagpart_y;

          /* If this is a complex value with each part being smaller than a
             word, the usual calling sequence will likely pack the pieces into
             a single register.  Unfortunately, SUBREG of hard registers only
             deals in terms of words, so we have a problem converting input
             arguments to the CONCAT of two registers that is used elsewhere
             for complex values.  If this is before reload, we can copy it into
             memory and reload.  FIXME, we should see about using extract and
             insert on integer registers, but complex short and complex char
             variables should be rarely used.  */
          if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
              && (reload_in_progress | reload_completed) == 0)
            {
              int packed_dest_p
                = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
              int packed_src_p
                = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);

              if (packed_dest_p || packed_src_p)
                {
                  enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
                                               ? MODE_FLOAT : MODE_INT);

                  enum machine_mode reg_mode
                    = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);

                  if (reg_mode != BLKmode)
                    {
                      rtx mem = assign_stack_temp (reg_mode,
                                                   GET_MODE_SIZE (mode), 0);
                      rtx cmem = adjust_address (mem, mode, 0);

                      cfun->cannot_inline
                        = N_("function using short complex types cannot be inline");

                      if (packed_dest_p)
                        {
                          rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);

                          emit_move_insn_1 (cmem, y);
                          return emit_move_insn_1 (sreg, mem);
                        }
                      else
                        {
                          rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);

                          emit_move_insn_1 (mem, sreg);
                          return emit_move_insn_1 (x, cmem);
                        }
                    }
                }
            }

          realpart_x = gen_realpart (submode, x);
          realpart_y = gen_realpart (submode, y);
          imagpart_x = gen_imagpart (submode, x);
          imagpart_y = gen_imagpart (submode, y);

          /* Show the output dies here.  This is necessary for SUBREGs
             of pseudos since we cannot track their lifetimes correctly;
             hard regs shouldn't appear here except as return values.
             We never want to emit such a clobber after reload.  */
          if (x != y
              && ! (reload_in_progress || reload_completed)
              && (GET_CODE (realpart_x) == SUBREG
                  || GET_CODE (imagpart_x) == SUBREG))
            emit_insn (gen_rtx_CLOBBER (VOIDmode, x));

          emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
                     (realpart_x, realpart_y));
          emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
                     (imagpart_x, imagpart_y));
        }

      return get_last_insn ();
    }
  /* This will handle any multi-word or full-word mode that lacks a move_insn
     pattern.  However, you will get better code if you define such patterns,
     even if they must turn into multiple assembler instructions.  */
  else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
    {
      rtx last_insn = 0;
      rtx seq, inner;
      int need_clobber;
      int i;

#ifdef PUSH_ROUNDING

      /* If X is a push on the stack, do the push now and replace
         X with a reference to the stack pointer.  */
      if (push_operand (x, GET_MODE (x)))
        {
          rtx temp;
          enum rtx_code code;

          /* Do not use anti_adjust_stack, since we don't want to update
             stack_pointer_delta.  */
          temp = expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
                               sub_optab,
#else
                               add_optab,
#endif
                               stack_pointer_rtx,
                               GEN_INT
                                 (PUSH_ROUNDING
                                  (GET_MODE_SIZE (GET_MODE (x)))),
                               stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);

          if (temp != stack_pointer_rtx)
            emit_move_insn (stack_pointer_rtx, temp);

          code = GET_CODE (XEXP (x, 0));

          /* Just hope that small offsets off SP are OK.  */
          if (code == POST_INC)
            temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
                                 GEN_INT (-((HOST_WIDE_INT)
                                            GET_MODE_SIZE (GET_MODE (x)))));
          else if (code == POST_DEC)
            temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
                                 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
          else
            temp = stack_pointer_rtx;

          x = change_address (x, VOIDmode, temp);
        }
#endif

      /* If we are in reload, see if either operand is a MEM whose address
         is scheduled for replacement.  */
      if (reload_in_progress && GET_CODE (x) == MEM
          && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
        x = replace_equiv_address_nv (x, inner);
      if (reload_in_progress && GET_CODE (y) == MEM
          && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
        y = replace_equiv_address_nv (y, inner);

      start_sequence ();

      need_clobber = 0;
      for (i = 0;
           i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
           i++)
        {
          rtx xpart = operand_subword (x, i, 1, mode);
          rtx ypart = operand_subword (y, i, 1, mode);

          /* If we can't get a part of Y, put Y into memory if it is a
             constant.  Otherwise, force it into a register.  If we still
             can't get a part of Y, abort.  */
          if (ypart == 0 && CONSTANT_P (y))
            {
              y = force_const_mem (mode, y);
              ypart = operand_subword (y, i, 1, mode);
            }
          else if (ypart == 0)
            ypart = operand_subword_force (y, i, mode);

          if (xpart == 0 || ypart == 0)
            abort ();

          need_clobber |= (GET_CODE (xpart) == SUBREG);

          last_insn = emit_move_insn (xpart, ypart);
        }

      seq = get_insns ();
      end_sequence ();

      /* Show the output dies here.  This is necessary for SUBREGs
         of pseudos since we cannot track their lifetimes correctly;
         hard regs shouldn't appear here except as return values.
         We never want to emit such a clobber after reload.  */
      if (x != y
          && ! (reload_in_progress || reload_completed)
          && need_clobber != 0)
        emit_insn (gen_rtx_CLOBBER (VOIDmode, x));

      emit_insn (seq);

      return last_insn;
    }
  else
    abort ();
}
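
/* Worked example of the word-by-word fallback above: moving a DImode
   value on a 32-bit target that lacks a movdi pattern loops twice
   (i == 0 and i == 1) and emits two SImode moves of the subwords
   returned by operand_subword.  If either destination subword is a
   SUBREG of a pseudo, a (clobber (reg X)) precedes the moves so that
   liveness of the whole register is tracked correctly.  */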
/* If Y is representable exactly in a narrower mode, and the target can
   perform the extension directly from constant or memory, then emit the
   move as an extension.  */

static rtx
compress_float_constant (x, y)
     rtx x, y;
{
  enum machine_mode dstmode = GET_MODE (x);
  enum machine_mode orig_srcmode = GET_MODE (y);
  enum machine_mode srcmode;
  REAL_VALUE_TYPE r;

  REAL_VALUE_FROM_CONST_DOUBLE (r, y);

  for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
       srcmode != orig_srcmode;
       srcmode = GET_MODE_WIDER_MODE (srcmode))
    {
      enum insn_code ic;
      rtx trunc_y, last_insn;

      /* Skip if the target can't extend this way.  */
      ic = can_extend_p (dstmode, srcmode, 0);
      if (ic == CODE_FOR_nothing)
        continue;

      /* Skip if the narrowed value isn't exact.  */
      if (! exact_real_truncate (srcmode, &r))
        continue;

      trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);

      if (LEGITIMATE_CONSTANT_P (trunc_y))
        {
          /* Skip if the target needs extra instructions to perform
             the extension.  */
          if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
            continue;
        }
      else if (float_extend_from_mem[dstmode][srcmode])
        trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
      else
        continue;

      emit_unop_insn (ic, x, trunc_y, UNKNOWN);
      last_insn = get_last_insn ();

      if (GET_CODE (x) == REG)
        REG_NOTES (last_insn)
          = gen_rtx_EXPR_LIST (REG_EQUAL, y, REG_NOTES (last_insn));

      return last_insn;
    }

  return NULL_RTX;
}
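
/* Worked example: moving the DFmode constant 1.0 into a register, when
   1.0 truncates exactly to SFmode and the target provides an
   extendsfdf2 pattern, emits a single extension from the SFmode
   constant (or from its memory copy) instead of loading a full DFmode
   constant, and tags the result with a REG_EQUAL note for 1.0.  */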
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   Note that it is not possible for the value returned to be a QUEUED.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */

rtx
push_block (size, extra, below)
     rtx size;
     int extra, below;
{
  rtx temp;

  size = convert_modes (Pmode, ptr_mode, size, 1);
  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (size, extra));
  else if (GET_CODE (size) == REG && extra == 0)
    anti_adjust_stack (size);
  else
    {
      temp = copy_to_mode_reg (Pmode, size);
      if (extra != 0)
        temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
                             temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

#ifndef STACK_GROWS_DOWNWARD
  if (0)
#else
  if (1)
#endif
    {
      temp = virtual_outgoing_args_rtx;
      if (extra != 0 && below)
        temp = plus_constant (temp, extra);
    }
  else
    {
      if (GET_CODE (size) == CONST_INT)
        temp = plus_constant (virtual_outgoing_args_rtx,
                              -INTVAL (size) - (below ? 0 : extra));
      else if (extra != 0 && !below)
        temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
                             negate_rtx (Pmode, plus_constant (size, extra)));
      else
        temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
                             negate_rtx (Pmode, size));
    }

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
}
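
/* Address arithmetic example for the downward-growing case above: with
   SIZE == (const_int 16), EXTRA == 4 and BELOW == 0, the stack is
   adjusted by 20 bytes and the returned block address is
   virtual_outgoing_args_rtx - 16 - 4, so the 4 bytes of padding end up
   at the high-address end of the block, just as the function comment
   promises for BELOW == 0.  */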
#ifdef PUSH_ROUNDING

/* Emit single push insn.  */

static void
emit_single_push_insn (mode, x, type)
     rtx x;
     enum machine_mode mode;
     tree type;
{
  rtx dest_addr;
  unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
  rtx dest;
  enum insn_code icode;
  insn_operand_predicate_fn pred;

  stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
  /* If there is a push pattern, use it.  Otherwise try the old way of
     throwing a MEM representing the push operation to the move expander.  */
  icode = push_optab->handlers[(int) mode].insn_code;
  if (icode != CODE_FOR_nothing)
    {
      if (((pred = insn_data[(int) icode].operand[0].predicate)
           && !((*pred) (x, mode))))
        x = force_reg (mode, x);
      emit_insn (GEN_FCN (icode) (x));
      return;
    }
  if (GET_MODE_SIZE (mode) == rounded_size)
    dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
  else
    {
#ifdef STACK_GROWS_DOWNWARD
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
                                GEN_INT (-(HOST_WIDE_INT) rounded_size));
#else
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
                                GEN_INT (rounded_size));
#endif
      dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
    }

  dest = gen_rtx_MEM (mode, dest_addr);

  if (type != 0)
    {
      set_mem_attributes (dest, type, 1);

      if (flag_optimize_sibling_calls)
        /* Function incoming arguments may overlap with sibling call
           outgoing arguments and we cannot allow reordering of reads
           from function arguments with stores to outgoing arguments
           of sibling calls.  */
        set_mem_alias_set (dest, 0);
    }
  emit_move_insn (dest, x);
}
#endif
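
/* Example of the fast path above: when GET_MODE_SIZE (mode) equals its
   own PUSH_ROUNDING and STACK_PUSH_CODE is PRE_DEC, dest_addr is
   (pre_dec (reg sp)), so the emitted move is a genuine push insn.
   When rounding pads the size, the PRE_MODIFY form instead adjusts the
   stack pointer by the rounded amount within the same insn.  */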
/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.

   ALIGN (in bits) is maximum alignment we can assume.

   If PARTIAL and REG are both nonzero, then copy that many of the first
   words of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL words,
   rounded *down* to a multiple of PARM_BOUNDARY.
   REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take all other actions for an
   argument partially in registers, but do not actually load any
   registers.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.

   REG_PARM_STACK_SPACE is nonzero if functions require stack space
   for arguments passed in registers.  If nonzero, it will be the number
   of bytes required.  */

void
emit_push_insn (x, mode, type, size, align, partial, reg, extra,
                args_addr, args_so_far, reg_parm_stack_space,
                alignment_pad)
     rtx x;
     enum machine_mode mode;
     tree type;
     rtx size;
     unsigned int align;
     int partial;
     rtx reg;
     int extra;
     rtx args_addr;
     rtx args_so_far;
     int reg_parm_stack_space;
     rtx alignment_pad;
{
  rtx xinner;
  enum direction stack_direction
#ifdef STACK_GROWS_DOWNWARD
    = downward;
#else
    = upward;
#endif

  /* Decide where to pad the argument: `downward' for below,
     `upward' for above, or `none' for don't pad it.
     Default is below for small data on big-endian machines; else above.  */
  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);

  /* Invert direction if stack is post-decrement.  */
  if (STACK_PUSH_CODE == POST_DEC)
    if (where_pad != none)
      where_pad = (where_pad == downward ? upward : downward);

  xinner = x = protect_from_queue (x, 0);

  if (mode == BLKmode)
    {
      /* Copy a block into the stack, entirely or partially.  */

      rtx temp;
      int used = partial * UNITS_PER_WORD;
      int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
      int skip;

      if (size == 0)
        abort ();

      used -= offset;

      /* USED is now the # of bytes we need not copy to the stack
         because registers will take care of them.  */

      if (partial != 0)
        xinner = adjust_address (xinner, BLKmode, used);

      /* If the partial register-part of the arg counts in its stack size,
         skip the part of stack space corresponding to the registers.
         Otherwise, start copying to the beginning of the stack space,
         by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : used;

#ifdef PUSH_ROUNDING
      /* Do it with several push insns if that doesn't take lots of insns
         and if there is no difficulty with push insns that skip bytes
         on the stack for alignment purposes.  */
      if (args_addr == 0
          && PUSH_ARGS
          && GET_CODE (size) == CONST_INT
          && skip == 0
          && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
          /* Here we avoid the case of a structure whose weak alignment
             forces many pushes of a small amount of data,
             and such small pushes do rounding that causes trouble.  */
          && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
              || align >= BIGGEST_ALIGNMENT
              || (PUSH_ROUNDING (align / BITS_PER_UNIT)
                  == (align / BITS_PER_UNIT)))
          && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
        {
          /* Push padding now if padding above and stack grows down,
             or if padding below and stack grows up.
             But if space already allocated, this has already been done.  */
          if (extra && args_addr == 0
              && where_pad != none && where_pad != stack_direction)
            anti_adjust_stack (GEN_INT (extra));

          move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
        }
      else
#endif /* PUSH_ROUNDING */
        {
          rtx target;

          /* Otherwise make space on the stack and copy the data
             to the address of that space.  */

          /* Deduct words put into registers from the size we must copy.  */
          if (partial != 0)
            {
              if (GET_CODE (size) == CONST_INT)
                size = GEN_INT (INTVAL (size) - used);
              else
                size = expand_binop (GET_MODE (size), sub_optab, size,
                                     GEN_INT (used), NULL_RTX, 0,
                                     OPTAB_LIB_WIDEN);
            }

          /* Get the address of the stack space.
             In this case, we do not deal with EXTRA separately.
             A single stack adjust will do.  */
          if (! args_addr)
            {
              temp = push_block (size, extra, where_pad == downward);
              extra = 0;
            }
          else if (GET_CODE (args_so_far) == CONST_INT)
            temp = memory_address (BLKmode,
                                   plus_constant (args_addr,
                                                  skip + INTVAL (args_so_far)));
          else
            temp = memory_address (BLKmode,
                                   plus_constant (gen_rtx_PLUS (Pmode,
                                                                args_addr,
                                                                args_so_far),
                                                  skip));

          if (!ACCUMULATE_OUTGOING_ARGS)
            {
              /* If the source is referenced relative to the stack pointer,
                 copy it to another register to stabilize it.  We do not need
                 to do this if we know that we won't be changing sp.  */

              if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
                  || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
                temp = copy_to_reg (temp);
            }

          target = gen_rtx_MEM (BLKmode, temp);

          if (type != 0)
            {
              set_mem_attributes (target, type, 1);
              /* Function incoming arguments may overlap with sibling call
                 outgoing arguments and we cannot allow reordering of reads
                 from function arguments with stores to outgoing arguments
                 of sibling calls.  */
              set_mem_alias_set (target, 0);
            }

          /* ALIGN may well be better aligned than TYPE, e.g. due to
             PARM_BOUNDARY.  Assume the caller isn't lying.  */
          set_mem_align (target, align);

          emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
        }
    }
  else if (partial > 0)
    {
      /* Scalar partly in registers.  */

      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
      int i;
      int not_stack;
      /* # words of start of argument
         that we must make space for but need not store.  */
      int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
      int args_offset = INTVAL (args_so_far);
      int skip;

      /* Push padding now if padding above and stack grows down,
         or if padding below and stack grows up.
         But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
          && where_pad != none && where_pad != stack_direction)
        anti_adjust_stack (GEN_INT (extra));

      /* If we make space by pushing it, we might as well push
         the real data.  Otherwise, we can leave OFFSET nonzero
         and leave the space uninitialized.  */
      if (args_addr == 0)
        offset = 0;

      /* Now NOT_STACK gets the number of words that we don't need to
         allocate on the stack.  */
      not_stack = partial - offset;

      /* If the partial register-part of the arg counts in its stack size,
         skip the part of stack space corresponding to the registers.
         Otherwise, start copying to the beginning of the stack space,
         by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : not_stack;

      if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
        x = validize_mem (force_const_mem (mode, x));

      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
         SUBREGs of such registers are not allowed.  */
      if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
           && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
        x = copy_to_reg (x);

      /* Loop over all the words allocated on the stack for this arg.  */
      /* We can do it by words, because any scalar bigger than a word
         has a size a multiple of a word.  */
#ifndef PUSH_ARGS_REVERSED
      for (i = not_stack; i < size; i++)
#else
      for (i = size - 1; i >= not_stack; i--)
#endif
        if (i >= not_stack + offset)
          emit_push_insn (operand_subword_force (x, i, mode),
                          word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
                          0, args_addr,
                          GEN_INT (args_offset + ((i - not_stack + skip)
                                                  * UNITS_PER_WORD)),
                          reg_parm_stack_space, alignment_pad);
    }
  else
    {
      rtx addr;
      rtx dest;

      /* Push padding now if padding above and stack grows down,
         or if padding below and stack grows up.
         But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
          && where_pad != none && where_pad != stack_direction)
        anti_adjust_stack (GEN_INT (extra));

#ifdef PUSH_ROUNDING
      if (args_addr == 0 && PUSH_ARGS)
        emit_single_push_insn (mode, x, type);
      else
#endif
        {
          if (GET_CODE (args_so_far) == CONST_INT)
            addr
              = memory_address (mode,
                                plus_constant (args_addr,
                                               INTVAL (args_so_far)));
          else
            addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
                                                        args_so_far));
          dest = gen_rtx_MEM (mode, addr);

          if (type != 0)
            {
              set_mem_attributes (dest, type, 1);
              /* Function incoming arguments may overlap with sibling call
                 outgoing arguments and we cannot allow reordering of reads
                 from function arguments with stores to outgoing arguments
                 of sibling calls.  */
              set_mem_alias_set (dest, 0);
            }

          emit_move_insn (dest, x);
        }
    }

  /* If part should go in registers, copy that part
     into the appropriate registers.  Do this now, at the end,
     since mem-to-mem copies above may do function calls.  */
  if (partial > 0 && reg != 0)
    {
      /* Handle calls that pass values in multiple non-contiguous locations.
         The Irix 6 ABI has examples of this.  */
      if (GET_CODE (reg) == PARALLEL)
        emit_group_load (reg, x, -1);  /* ??? size?  */
      else
        move_block_to_reg (REGNO (reg), x, partial, mode);
    }

  if (extra && args_addr == 0 && where_pad == stack_direction)
    anti_adjust_stack (GEN_INT (extra));

  if (alignment_pad && args_addr == 0)
    anti_adjust_stack (alignment_pad);
}
/* Return X if X can be used as a subtarget in a sequence of arithmetic
   operations.  */

static rtx
get_subtarget (x)
     rtx x;
{
  return ((x == 0
           /* Only registers can be subtargets.  */
           || GET_CODE (x) != REG
           /* If the register is readonly, it can't be set more than once.  */
           || RTX_UNCHANGING_P (x)
           /* Don't use hard regs to avoid extending their life.  */
           || REGNO (x) < FIRST_PSEUDO_REGISTER
           /* Avoid subtargets inside loops,
              since they hide some invariant expressions.  */
           || preserve_subexpressions_p ())
          ? 0 : x);
}
/* Expand an assignment that stores the value of FROM into TO.
   If WANT_VALUE is nonzero, return an rtx for the value of TO.
   (This may contain a QUEUED rtx;
   if the value is constant, this rtx is a constant.)
   Otherwise, the returned value is NULL_RTX.

   SUGGEST_REG is no longer actually used.
   It used to mean, copy the value through a register
   and return that register, if that is possible.
   We now use WANT_VALUE to decide whether to do this.  */

rtx
expand_assignment (to, from, want_value, suggest_reg)
     tree to, from;
     int want_value;
     int suggest_reg ATTRIBUTE_UNUSED;
{
  rtx to_rtx = 0;
  rtx result;

  /* Don't crash if the lhs of the assignment was erroneous.  */

  if (TREE_CODE (to) == ERROR_MARK)
    {
      result = expand_expr (from, NULL_RTX, VOIDmode, 0);
      return want_value ? result : NULL_RTX;
    }

  /* Assignment of a structure component needs special treatment
     if the structure component's rtx is not simply a MEM.
     Assignment of an array element at a constant index, and assignment of
     an array element in an unaligned packed structure field, has the same
     problem.  */

  if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
      || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
      || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
    {
      enum machine_mode mode1;
      HOST_WIDE_INT bitsize, bitpos;
      rtx orig_to_rtx;
      tree offset;
      int unsignedp;
      int volatilep = 0;
      tree tem;

      push_temp_slots ();
      tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
                                 &unsignedp, &volatilep);

      /* If we are going to use store_bit_field and extract_bit_field,
         make sure to_rtx will be safe for multiple use.  */

      if (mode1 == VOIDmode && want_value)
        tem = stabilize_reference (tem);

      orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);

      if (offset != 0)
        {
          rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);

          if (GET_CODE (to_rtx) != MEM)
            abort ();

#ifdef POINTERS_EXTEND_UNSIGNED
          if (GET_MODE (offset_rtx) != Pmode)
            offset_rtx = convert_memory_address (Pmode, offset_rtx);
#else
          if (GET_MODE (offset_rtx) != ptr_mode)
            offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif

          /* A constant address in TO_RTX can have VOIDmode, we must not try
             to call force_reg for that case.  Avoid that case.  */
          if (GET_CODE (to_rtx) == MEM
              && GET_MODE (to_rtx) == BLKmode
              && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
              && bitsize > 0
              && (bitpos % bitsize) == 0
              && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
              && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
            {
              to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
              bitpos = 0;
            }

          to_rtx = offset_address (to_rtx, offset_rtx,
                                   highest_pow2_factor_for_type (TREE_TYPE (to),
                                                                 offset));
        }

      if (GET_CODE (to_rtx) == MEM)
        {
          /* If the field is at offset zero, we could have been given the
             DECL_RTX of the parent struct.  Don't munge it.  */
          to_rtx = shallow_copy_rtx (to_rtx);

          set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
        }

      /* Deal with volatile and readonly fields.  The former is only done
         for MEM.  Also set MEM_KEEP_ALIAS_SET_P if needed.  */
      if (volatilep && GET_CODE (to_rtx) == MEM)
        {
          if (to_rtx == orig_to_rtx)
            to_rtx = copy_rtx (to_rtx);
          MEM_VOLATILE_P (to_rtx) = 1;
        }

      if (TREE_CODE (to) == COMPONENT_REF
          && TREE_READONLY (TREE_OPERAND (to, 1)))
        {
          if (to_rtx == orig_to_rtx)
            to_rtx = copy_rtx (to_rtx);
          RTX_UNCHANGING_P (to_rtx) = 1;
        }

      if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
        {
          if (to_rtx == orig_to_rtx)
            to_rtx = copy_rtx (to_rtx);
          MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
        }

      result = store_field (to_rtx, bitsize, bitpos, mode1, from,
                            (want_value
                             /* Spurious cast for HPUX compiler.  */
                             ? ((enum machine_mode)
                                TYPE_MODE (TREE_TYPE (to)))
                             : VOIDmode),
                            unsignedp, TREE_TYPE (tem), get_alias_set (to));

      preserve_temp_slots (result);
      free_temp_slots ();
      pop_temp_slots ();

      /* If the value is meaningful, convert RESULT to the proper mode.
         Otherwise, return nothing.  */
      return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
                                          TYPE_MODE (TREE_TYPE (from)),
                                          result,
                                          TREE_UNSIGNED (TREE_TYPE (to)))
              : NULL_RTX);
    }
  /* If the rhs is a function call and its value is not an aggregate,
     call the function before we start to compute the lhs.
     This is needed for correct code for cases such as
     val = setjmp (buf) on machines where reference to val
     requires loading up part of an address in a separate insn.

     Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
     since it might be a promoted variable where the zero- or sign- extension
     needs to be done.  Handling this in the normal way is safe because no
     computation is done before the call.  */
  if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
      && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
            && GET_CODE (DECL_RTL (to)) == REG))
    {
      rtx value;

      push_temp_slots ();
      value = expand_expr (from, NULL_RTX, VOIDmode, 0);
      if (to_rtx == 0)
        to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);

      /* Handle calls that return values in multiple non-contiguous locations.
         The Irix 6 ABI has examples of this.  */
      if (GET_CODE (to_rtx) == PARALLEL)
        emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)));
      else if (GET_MODE (to_rtx) == BLKmode)
        emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
      else
        {
#ifdef POINTERS_EXTEND_UNSIGNED
          if (POINTER_TYPE_P (TREE_TYPE (to))
              && GET_MODE (to_rtx) != GET_MODE (value))
            value = convert_memory_address (GET_MODE (to_rtx), value);
#endif
          emit_move_insn (to_rtx, value);
        }
      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return want_value ? to_rtx : NULL_RTX;
    }

  /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.
     Don't re-expand if it was expanded already (in COMPONENT_REF case).  */

  if (to_rtx == 0)
    to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);

  /* Don't move directly into a return register.  */
  if (TREE_CODE (to) == RESULT_DECL
      && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
    {
      rtx temp;

      push_temp_slots ();
      temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);

      if (GET_CODE (to_rtx) == PARALLEL)
        emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)));
      else
        emit_move_insn (to_rtx, temp);

      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return want_value ? to_rtx : NULL_RTX;
    }

  /* In case we are returning the contents of an object which overlaps
     the place the value is being stored, use a safe function when copying
     a value through a pointer into a structure value return block.  */
  if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
      && current_function_returns_struct
      && !current_function_returns_pcc_struct)
    {
      rtx from_rtx, size;

      push_temp_slots ();
      size = expr_size (from);
      from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);

      if (TARGET_MEM_FUNCTIONS)
        emit_library_call (memmove_libfunc, LCT_NORMAL,
                           VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
                           XEXP (from_rtx, 0), Pmode,
                           convert_to_mode (TYPE_MODE (sizetype),
                                            size, TREE_UNSIGNED (sizetype)),
                           TYPE_MODE (sizetype));
      else
        emit_library_call (bcopy_libfunc, LCT_NORMAL,
                           VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
                           XEXP (to_rtx, 0), Pmode,
                           convert_to_mode (TYPE_MODE (integer_type_node),
                                            size,
                                            TREE_UNSIGNED (integer_type_node)),
                           TYPE_MODE (integer_type_node));

      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return want_value ? to_rtx : NULL_RTX;
    }

  /* Compute FROM and store the value in the rtx we got.  */

  push_temp_slots ();
  result = store_expr (from, to_rtx, want_value);
  preserve_temp_slots (result);
  free_temp_slots ();
  pop_temp_slots ();
  return want_value ? result : NULL_RTX;
}
/* Generate code for computing expression EXP,
   and storing the value into TARGET.
   TARGET may contain a QUEUED rtx.

   If WANT_VALUE & 1 is nonzero, return a copy of the value
   not in TARGET, so that we can be sure to use the proper
   value in a containing expression even if TARGET has something
   else stored in it.  If possible, we copy the value through a pseudo
   and return that pseudo.  Or, if the value is constant, we try to
   return the constant.  In some cases, we return a pseudo
   copied *from* TARGET.

   If the mode is BLKmode then we may return TARGET itself.
   It turns out that in BLKmode it doesn't cause a problem,
   because C has no operators that could combine two different
   assignments into the same BLKmode object with different values
   with no sequence point.  Will other languages need this to
   be more thorough?

   If WANT_VALUE & 1 is 0, we return NULL, to make sure
   to catch quickly any cases where the caller uses the value
   and fails to set WANT_VALUE.

   If WANT_VALUE & 2 is set, this is a store into a call param on the
   stack, and block moves may need to be treated specially.  */

rtx
store_expr (exp, target, want_value)
     tree exp;
     rtx target;
     int want_value;
{
  rtx temp;
  int dont_return_target = 0;
  int dont_store_target = 0;

  if (TREE_CODE (exp) == COMPOUND_EXPR)
    {
      /* Perform first part of compound expression, then assign from second
         part.  */
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
                   want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
      emit_queue ();
      return store_expr (TREE_OPERAND (exp, 1), target, want_value);
    }
  else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
    {
      /* For conditional expression, get safe form of the target.  Then
         test the condition, doing the appropriate assignment on either
         side.  This avoids the creation of unnecessary temporaries.
         For non-BLKmode, it is more efficient not to do this.  */

      rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();

      emit_queue ();
      target = protect_from_queue (target, 1);

      do_pending_stack_adjust ();
      NO_DEFER_POP;
      jumpifnot (TREE_OPERAND (exp, 0), lab1);
      start_cleanup_deferral ();
      store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
      end_cleanup_deferral ();
      emit_queue ();
      emit_jump_insn (gen_jump (lab2));
      emit_barrier ();
      emit_label (lab1);
      start_cleanup_deferral ();
      store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
      end_cleanup_deferral ();
      emit_queue ();
      emit_label (lab2);
      OK_DEFER_POP;

      return want_value & 1 ? target : NULL_RTX;
    }
4331 else if (queued_subexp_p (target))
4332 /* If target contains a postincrement, let's not risk
4333 using it as the place to generate the rhs. */
4335 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4337 /* Expand EXP into a new pseudo. */
4338 temp = gen_reg_rtx (GET_MODE (target));
4339 temp = expand_expr (exp, temp, GET_MODE (target),
4341 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4344 temp = expand_expr (exp, NULL_RTX, GET_MODE (target),
4346 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4348 /* If target is volatile, ANSI requires accessing the value
4349 *from* the target, if it is accessed. So make that happen.
4350 In no case return the target itself. */
4351 if (! MEM_VOLATILE_P (target) && (want_value & 1) != 0)
4352 dont_return_target = 1;
4354 else if ((want_value & 1) != 0
4355 && GET_CODE (target) == MEM
4356 && ! MEM_VOLATILE_P (target)
4357 && GET_MODE (target) != BLKmode)
4358 /* If target is in memory and caller wants value in a register instead,
4359 arrange that. Pass TARGET as target for expand_expr so that,
4360 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4361 We know expand_expr will not use the target in that case.
4362 Don't do this if TARGET is volatile because we are supposed
4363 to write it and then read it. */
4365 temp = expand_expr (exp, target, GET_MODE (target),
4366 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4367 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
	  /* If TEMP is already in the desired TARGET, only copy it from
	     memory and don't store it there again.  */
	  if (temp == target
	      || (rtx_equal_p (temp, target)
4373 && ! side_effects_p (temp) && ! side_effects_p (target)))
4374 dont_store_target = 1;
4375 temp = copy_to_reg (temp);
4377 dont_return_target = 1;
4379 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
      /* If this is a scalar in a register that is stored in a wider mode
	 than the declared mode, compute the result into its declared mode
	 and then convert to the wider mode.  Our value is the computed
	 expression.  */
4385 rtx inner_target = 0;
4387 /* If we don't want a value, we can do the conversion inside EXP,
4388 which will often result in some optimizations. Do the conversion
4389 in two steps: first change the signedness, if needed, then
4390 the extend. But don't do this if the type of EXP is a subtype
4391 of something else since then the conversion might involve
4392 more than just converting modes. */
4393 if ((want_value & 1) == 0
4394 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4395 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4397 if (TREE_UNSIGNED (TREE_TYPE (exp))
4398 != SUBREG_PROMOTED_UNSIGNED_P (target))
	    exp = convert
	      ((*lang_hooks.types.signed_or_unsigned_type)
4401 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4403 exp = convert ((*lang_hooks.types.type_for_mode)
4404 (GET_MODE (SUBREG_REG (target)),
			  SUBREG_PROMOTED_UNSIGNED_P (target)),
			 exp);
4408 inner_target = SUBREG_REG (target);
4411 temp = expand_expr (exp, inner_target, VOIDmode,
4412 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4414 /* If TEMP is a MEM and we want a result value, make the access
4415 now so it gets done only once. Strictly speaking, this is
4416 only necessary if the MEM is volatile, or if the address
4417 overlaps TARGET. But not performing the load twice also
4418 reduces the amount of rtl we generate and then have to CSE. */
4419 if (GET_CODE (temp) == MEM && (want_value & 1) != 0)
4420 temp = copy_to_reg (temp);
4422 /* If TEMP is a VOIDmode constant, use convert_modes to make
4423 sure that we properly convert it. */
4424 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4426 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4427 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4428 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4429 GET_MODE (target), temp,
4430 SUBREG_PROMOTED_UNSIGNED_P (target));
4433 convert_move (SUBREG_REG (target), temp,
4434 SUBREG_PROMOTED_UNSIGNED_P (target));
4436 /* If we promoted a constant, change the mode back down to match
4437 target. Otherwise, the caller might get confused by a result whose
4438 mode is larger than expected. */
4440 if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
4442 if (GET_MODE (temp) != VOIDmode)
4444 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4445 SUBREG_PROMOTED_VAR_P (temp) = 1;
4446 SUBREG_PROMOTED_UNSIGNED_SET (temp,
4447 SUBREG_PROMOTED_UNSIGNED_P (target));
4450 temp = convert_modes (GET_MODE (target),
4451 GET_MODE (SUBREG_REG (target)),
4452 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4455 return want_value & 1 ? temp : NULL_RTX;
4459 temp = expand_expr (exp, target, GET_MODE (target),
4460 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
  /* Return TARGET if it's a specified hardware register.
     If TARGET is a volatile mem ref, either return TARGET
     or return a reg copied *from* TARGET; ANSI requires this.

     Otherwise, if TEMP is not TARGET, return TEMP
     if it is constant (for efficiency),
     or if we really want the correct value.  */
  if (!(target && GET_CODE (target) == REG
	&& REGNO (target) < FIRST_PSEUDO_REGISTER)
      && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
      && ! rtx_equal_p (temp, target)
      && (CONSTANT_P (temp) || (want_value & 1) != 0))
    dont_return_target = 1;
  /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
     the same as that of TARGET, adjust the constant.  This is needed, for
     example, in case it is a CONST_DOUBLE and we want only a word-sized
     value.  */
4480 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4481 && TREE_CODE (exp) != ERROR_MARK
4482 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4483 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4484 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
  /* If value was not generated in the target, store it there.
     Convert the value to TARGET's type first if necessary.
     If TEMP and TARGET compare equal according to rtx_equal_p, but
     one or both of them are volatile memory refs, we have to distinguish
     two cases:
     - expand_expr has used TARGET.  In this case, we must not generate
       another copy.  This can be detected by TARGET being equal according
       to == .
     - expand_expr has not used TARGET - that means that the source just
       happens to have the same RTX form.  Since temp will have been created
       by expand_expr, it will compare unequal according to == .
       We must generate a copy in this case, to reach the correct number
       of volatile memory references.  */
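/* Illustrative example (added commentary): for

	volatile int *p;
	...
	*p = *p;

   the source and destination are distinct MEMs with the same address,
   so they are rtx_equal_p but not == ; the copy must therefore still be
   emitted so that both the volatile read and the volatile write appear
   in the generated rtl.  */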
4500 if ((! rtx_equal_p (temp, target)
4501 || (temp != target && (side_effects_p (temp)
4502 || side_effects_p (target))))
4503 && TREE_CODE (exp) != ERROR_MARK
4504 && ! dont_store_target
4505 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4506 but TARGET is not valid memory reference, TEMP will differ
4507 from TARGET although it is really the same location. */
4508 && (TREE_CODE_CLASS (TREE_CODE (exp)) != 'd'
4509 || target != DECL_RTL_IF_SET (exp))
      /* If there's nothing to copy, don't bother.  Don't call expr_size
	 unless necessary, because some front ends' expr_size hook (e.g.
	 the C++ one) aborts on objects that are not supposed to be
	 bit-copied or bit-initialized.  */
      && expr_size (exp) != const0_rtx)
4516 target = protect_from_queue (target, 1);
4517 if (GET_MODE (temp) != GET_MODE (target)
4518 && GET_MODE (temp) != VOIDmode)
4520 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4521 if (dont_return_target)
4523 /* In this case, we will return TEMP,
4524 so make sure it has the proper mode.
4525 But don't forget to store the value into TARGET. */
4526 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4527 emit_move_insn (target, temp);
4530 convert_move (target, temp, unsignedp);
4533 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4535 /* Handle copying a string constant into an array. The string
4536 constant may be shorter than the array. So copy just the string's
4537 actual length, and clear the rest. First get the size of the data
4538 type of the string, which is actually the size of the target. */
4539 rtx size = expr_size (exp);
4541 if (GET_CODE (size) == CONST_INT
4542 && INTVAL (size) < TREE_STRING_LENGTH (exp))
	emit_block_move (target, temp, size,
			 (want_value & 2
			  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
	  /* Compute the size of the data to copy from the string.  */
	  tree copy_size
	    = size_binop (MIN_EXPR,
			  make_tree (sizetype, size),
			  size_int (TREE_STRING_LENGTH (exp)));
	  rtx copy_size_rtx
	    = expand_expr (copy_size, NULL_RTX, VOIDmode,
			   (want_value & 2
			    ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4559 /* Copy that much. */
4560 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx, 0);
	  emit_block_move (target, temp, copy_size_rtx,
			   (want_value & 2
			    ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4565 /* Figure out how much is left in TARGET that we have to clear.
4566 Do all calculations in ptr_mode. */
4567 if (GET_CODE (copy_size_rtx) == CONST_INT)
4569 size = plus_constant (size, -INTVAL (copy_size_rtx));
4570 target = adjust_address (target, BLKmode,
4571 INTVAL (copy_size_rtx));
4575 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
				   copy_size_rtx, NULL_RTX, 0,
				   OPTAB_LIB_WIDEN);
4579 #ifdef POINTERS_EXTEND_UNSIGNED
4580 if (GET_MODE (copy_size_rtx) != Pmode)
		copy_size_rtx = convert_memory_address (Pmode,
							copy_size_rtx);
#endif
4585 target = offset_address (target, copy_size_rtx,
4586 highest_pow2_factor (copy_size));
4587 label = gen_label_rtx ();
4588 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4589 GET_MODE (size), 0, label);
4592 if (size != const0_rtx)
	    clear_storage (target, size);

	  emit_label (label);
4599 /* Handle calls that return values in multiple non-contiguous locations.
4600 The Irix 6 ABI has examples of this. */
4601 else if (GET_CODE (target) == PARALLEL)
4602 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)));
4603 else if (GET_MODE (temp) == BLKmode)
    emit_block_move (target, temp, expr_size (exp),
		     (want_value & 2
		      ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4608 emit_move_insn (target, temp);
4611 /* If we don't want a value, return NULL_RTX. */
  if ((want_value & 1) == 0)
    return NULL_RTX;
4615 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4616 ??? The latter test doesn't seem to make sense. */
  else if (dont_return_target && GET_CODE (temp) != MEM)
    return temp;
4620 /* Return TARGET itself if it is a hard register. */
4621 else if ((want_value & 1) != 0
4622 && GET_MODE (target) != BLKmode
4623 && ! (GET_CODE (target) == REG
4624 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4625 return copy_to_reg (target);
/* Return 1 if EXP just contains zeros.  */

static int
is_zeros_p (exp)
     tree exp;
{
  tree elt;

  switch (TREE_CODE (exp))
4643 case NON_LVALUE_EXPR:
4644 case VIEW_CONVERT_EXPR:
4645 return is_zeros_p (TREE_OPERAND (exp, 0));
4648 return integer_zerop (exp);
4652 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4655 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4658 for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4659 elt = TREE_CHAIN (elt))
4660 if (!is_zeros_p (TREE_VALUE (elt)))
4666 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4667 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4668 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4669 if (! is_zeros_p (TREE_VALUE (elt)))
4679 /* Return 1 if EXP contains mostly (3/4) zeros. */
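/* Illustrative example (added commentary): for the nested initializer
   { 0, 0, 0, 5 } we would count ELTS == 4 and ZEROS == 3, and since
   4 * 3 >= 3 * 4 holds, the constructor counts as mostly zero, so
   callers will prefer to clear the whole object and store only the 5.  */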
static int
mostly_zeros_p (exp)
     tree exp;
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
4687 int elts = 0, zeros = 0;
4688 tree elt = CONSTRUCTOR_ELTS (exp);
4689 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4691 /* If there are no ranges of true bits, it is all zero. */
4692 return elt == NULL_TREE;
4694 for (; elt; elt = TREE_CHAIN (elt))
	  /* We do not handle the case where the index is a RANGE_EXPR,
	     so the statistic will be somewhat inaccurate.
	     We do make a more accurate count in store_constructor itself,
	     and since this function is only used for nested array elements,
	     this should be close enough.  */
4701 if (mostly_zeros_p (TREE_VALUE (elt)))
4706 return 4 * zeros >= 3 * elts;
4709 return is_zeros_p (exp);
4712 /* Helper function for store_constructor.
4713 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4714 TYPE is the type of the CONSTRUCTOR, not the element type.
4715 CLEARED is as for store_constructor.
4716 ALIAS_SET is the alias set to use for any stores.
4718 This provides a recursive shortcut back to store_constructor when it isn't
4719 necessary to go through store_field. This is so that we can pass through
4720 the cleared field to let store_constructor know that we may not have to
4721 clear a substructure if the outer structure has already been cleared. */
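/* Illustrative example (added commentary; hypothetical source, not from
   this file): given

	struct inner { int a, b; };
	struct outer { struct inner in; int c; } x = { { 1 } };

   the outer constructor names fewer fields than the type has, so X is
   cleared as a whole; the element { 1 } is itself a CONSTRUCTOR, so we
   recurse directly into store_constructor with CLEARED set and avoid
   zeroing X.IN a second time.  */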
static void
store_constructor_field (target, bitsize, bitpos, mode, exp, type, cleared,
			 alias_set)
     rtx target;
     unsigned HOST_WIDE_INT bitsize;
     HOST_WIDE_INT bitpos;
     enum machine_mode mode;
     tree exp, type;
     int cleared;
     int alias_set;
{
4734 if (TREE_CODE (exp) == CONSTRUCTOR
4735 && bitpos % BITS_PER_UNIT == 0
4736 /* If we have a nonzero bitpos for a register target, then we just
4737 let store_field do the bitfield handling. This is unlikely to
4738 generate unnecessary clear instructions anyways. */
4739 && (bitpos == 0 || GET_CODE (target) == MEM))
4741 if (GET_CODE (target) == MEM)
4743 = adjust_address (target,
4744 GET_MODE (target) == BLKmode
4746 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4747 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4750 /* Update the alias set, if required. */
4751 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4752 && MEM_ALIAS_SET (target) != 0)
4754 target = copy_rtx (target);
4755 set_mem_alias_set (target, alias_set);
4758 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4761 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4765 /* Store the value of constructor EXP into the rtx TARGET.
4766 TARGET is either a REG or a MEM; we know it cannot conflict, since
4767 safe_from_p has been called.
4768 CLEARED is true if TARGET is known to have been zero'd.
4769 SIZE is the number of bytes of TARGET we are allowed to modify: this
4770 may not be the same as the size of EXP if we are assigning to a field
4771 which has been packed to exclude padding bits. */
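/* Illustrative example (added commentary): for

	struct s { int a, b, c; };
	struct s x = { 1 };

   the constructor has fewer elements than the type has fields, so the
   code below would emit a single clear of all of X followed by one
   store of the constant 1 into X.A, instead of zeroing B and C field
   by field.  */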
static void
store_constructor (exp, target, cleared, size)
     tree exp;
     rtx target;
     int cleared;
     HOST_WIDE_INT size;
{
  tree type = TREE_TYPE (exp);
4781 #ifdef WORD_REGISTER_OPERATIONS
4782 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4785 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4786 || TREE_CODE (type) == QUAL_UNION_TYPE)
4790 /* We either clear the aggregate or indicate the value is dead. */
4791 if ((TREE_CODE (type) == UNION_TYPE
4792 || TREE_CODE (type) == QUAL_UNION_TYPE)
4794 && ! CONSTRUCTOR_ELTS (exp))
4795 /* If the constructor is empty, clear the union. */
4797 clear_storage (target, expr_size (exp));
4801 /* If we are building a static constructor into a register,
4802 set the initial value as zero so we can fold the value into
4803 a constant. But if more than one register is involved,
4804 this probably loses. */
4805 else if (! cleared && GET_CODE (target) == REG && TREE_STATIC (exp)
4806 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4808 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4812 /* If the constructor has fewer fields than the structure
4813 or if we are initializing the structure to mostly zeros,
4814 clear the whole structure first. Don't do this if TARGET is a
4815 register whose mode size isn't equal to SIZE since clear_storage
4816 can't handle this case. */
4817 else if (! cleared && size > 0
4818 && ((list_length (CONSTRUCTOR_ELTS (exp))
4819 != fields_length (type))
4820 || mostly_zeros_p (exp))
4821 && (GET_CODE (target) != REG
4822 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4825 clear_storage (target, GEN_INT (size));
4830 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4832 /* Store each element of the constructor into
4833 the corresponding field of TARGET. */
4835 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4837 tree field = TREE_PURPOSE (elt);
4838 tree value = TREE_VALUE (elt);
4839 enum machine_mode mode;
4840 HOST_WIDE_INT bitsize;
	  HOST_WIDE_INT bitpos = 0;
	  tree offset;
	  rtx to_rtx = target;

	  /* Just ignore missing fields.
	     We cleared the whole structure, above,
	     if any fields are missing.  */
	  if (field == 0)
	    continue;

	  if (cleared && is_zeros_p (value))
	    continue;

	  if (host_integerp (DECL_SIZE (field), 1))
	    bitsize = tree_low_cst (DECL_SIZE (field), 1);
	  else
	    bitsize = -1;

	  mode = DECL_MODE (field);
	  if (DECL_BIT_FIELD (field))
	    mode = VOIDmode;
4863 offset = DECL_FIELD_OFFSET (field);
4864 if (host_integerp (offset, 0)
4865 && host_integerp (bit_position (field), 0))
	    {
	      bitpos = int_bit_position (field);
	      offset = 0;
	    }
	  else
	    bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4877 if (contains_placeholder_p (offset))
4878 offset = build (WITH_RECORD_EXPR, sizetype,
4879 offset, make_tree (TREE_TYPE (exp), target));
4881 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
	      if (GET_CODE (to_rtx) != MEM)
		abort ();
4885 #ifdef POINTERS_EXTEND_UNSIGNED
4886 if (GET_MODE (offset_rtx) != Pmode)
		offset_rtx = convert_memory_address (Pmode, offset_rtx);
#else
	      if (GET_MODE (offset_rtx) != ptr_mode)
		offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif
4893 to_rtx = offset_address (to_rtx, offset_rtx,
4894 highest_pow2_factor (offset));
	  if (TREE_READONLY (field))
	    {
	      if (GET_CODE (to_rtx) == MEM)
		to_rtx = copy_rtx (to_rtx);

	      RTX_UNCHANGING_P (to_rtx) = 1;
	    }
4905 #ifdef WORD_REGISTER_OPERATIONS
4906 /* If this initializes a field that is smaller than a word, at the
4907 start of a word, try to widen it to a full word.
4908 This special case allows us to output C++ member function
4909 initializations in a form that the optimizers can understand. */
4910 if (GET_CODE (target) == REG
4911 && bitsize < BITS_PER_WORD
4912 && bitpos % BITS_PER_WORD == 0
4913 && GET_MODE_CLASS (mode) == MODE_INT
	      && TREE_CODE (value) == INTEGER_CST
	      && exp_size >= 0
	      && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4918 tree type = TREE_TYPE (value);
4920 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4922 type = (*lang_hooks.types.type_for_size)
4923 (BITS_PER_WORD, TREE_UNSIGNED (type));
4924 value = convert (type, value);
	      if (BYTES_BIG_ENDIAN)
		value
		  = fold (build (LSHIFT_EXPR, type, value,
				 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4931 bitsize = BITS_PER_WORD;
4936 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4937 && DECL_NONADDRESSABLE_P (field))
4939 to_rtx = copy_rtx (to_rtx);
4940 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4943 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4944 value, type, cleared,
4945 get_alias_set (TREE_TYPE (field)));
4948 else if (TREE_CODE (type) == ARRAY_TYPE
4949 || TREE_CODE (type) == VECTOR_TYPE)
4954 tree domain = TYPE_DOMAIN (type);
4955 tree elttype = TREE_TYPE (type);
4957 HOST_WIDE_INT minelt = 0;
4958 HOST_WIDE_INT maxelt = 0;
      /* Vectors are like arrays, but the domain is stored via an array
	 type indirectly.  */
	  /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
	     the same field as TYPE_DOMAIN, we are not guaranteed that
	     it always will.  */
4967 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
4968 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
4971 const_bounds_p = (TYPE_MIN_VALUE (domain)
4972 && TYPE_MAX_VALUE (domain)
4973 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4974 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4976 /* If we have constant bounds for the range of the type, get them. */
4979 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4980 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
      /* If the constructor has fewer elements than the array,
	 clear the whole array first.  Similarly if this is a
	 static constructor of a non-BLKmode object.  */
4986 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4990 HOST_WIDE_INT count = 0, zero_count = 0;
4991 need_to_clear = ! const_bounds_p;
4993 /* This loop is a more accurate version of the loop in
4994 mostly_zeros_p (it handles RANGE_EXPR in an index).
4995 It is also needed to check for missing elements. */
4996 for (elt = CONSTRUCTOR_ELTS (exp);
4997 elt != NULL_TREE && ! need_to_clear;
4998 elt = TREE_CHAIN (elt))
5000 tree index = TREE_PURPOSE (elt);
5001 HOST_WIDE_INT this_node_count;
5003 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5005 tree lo_index = TREE_OPERAND (index, 0);
5006 tree hi_index = TREE_OPERAND (index, 1);
5008 if (! host_integerp (lo_index, 1)
5009 || ! host_integerp (hi_index, 1))
5015 this_node_count = (tree_low_cst (hi_index, 1)
5016 - tree_low_cst (lo_index, 1) + 1);
5019 this_node_count = 1;
5021 count += this_node_count;
5022 if (mostly_zeros_p (TREE_VALUE (elt)))
5023 zero_count += this_node_count;
5026 /* Clear the entire array first if there are any missing elements,
5027 or if the incidence of zero elements is >= 75%. */
5029 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
5033 if (need_to_clear && size > 0)
5038 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5040 clear_storage (target, GEN_INT (size));
5044 else if (REG_P (target))
5045 /* Inform later passes that the old value is dead. */
5046 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5048 /* Store each element of the constructor into
5049 the corresponding element of TARGET, determined
5050 by counting the elements. */
5051 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
5053 elt = TREE_CHAIN (elt), i++)
5055 enum machine_mode mode;
5056 HOST_WIDE_INT bitsize;
5057 HOST_WIDE_INT bitpos;
5059 tree value = TREE_VALUE (elt);
5060 tree index = TREE_PURPOSE (elt);
5061 rtx xtarget = target;
5063 if (cleared && is_zeros_p (value))
5066 unsignedp = TREE_UNSIGNED (elttype);
5067 mode = TYPE_MODE (elttype);
5068 if (mode == BLKmode)
5069 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5070 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5073 bitsize = GET_MODE_BITSIZE (mode);
5075 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5077 tree lo_index = TREE_OPERAND (index, 0);
5078 tree hi_index = TREE_OPERAND (index, 1);
5079 rtx index_r, pos_rtx, loop_end;
5080 struct nesting *loop;
5081 HOST_WIDE_INT lo, hi, count;
5084 /* If the range is constant and "small", unroll the loop. */
5086 && host_integerp (lo_index, 0)
5087 && host_integerp (hi_index, 0)
5088 && (lo = tree_low_cst (lo_index, 0),
5089 hi = tree_low_cst (hi_index, 0),
5090 count = hi - lo + 1,
5091 (GET_CODE (target) != MEM
5093 || (host_integerp (TYPE_SIZE (elttype), 1)
5094 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5097 lo -= minelt; hi -= minelt;
5098 for (; lo <= hi; lo++)
5100 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5102 if (GET_CODE (target) == MEM
5103 && !MEM_KEEP_ALIAS_SET_P (target)
5104 && TREE_CODE (type) == ARRAY_TYPE
5105 && TYPE_NONALIASED_COMPONENT (type))
5107 target = copy_rtx (target);
5108 MEM_KEEP_ALIAS_SET_P (target) = 1;
5111 store_constructor_field
5112 (target, bitsize, bitpos, mode, value, type, cleared,
5113 get_alias_set (elttype));
5118 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
5119 loop_end = gen_label_rtx ();
5121 unsignedp = TREE_UNSIGNED (domain);
5123 index = build_decl (VAR_DECL, NULL_TREE, domain);
5126 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5128 SET_DECL_RTL (index, index_r);
5129 if (TREE_CODE (value) == SAVE_EXPR
5130 && SAVE_EXPR_RTL (value) == 0)
		  /* Make sure value gets expanded once before the
		     loop.  */
		  expand_expr (value, const0_rtx, VOIDmode, 0);
5137 store_expr (lo_index, index_r, 0);
5138 loop = expand_start_loop (0);
5140 /* Assign value to element index. */
5142 = convert (ssizetype,
5143 fold (build (MINUS_EXPR, TREE_TYPE (index),
5144 index, TYPE_MIN_VALUE (domain))));
5145 position = size_binop (MULT_EXPR, position,
5147 TYPE_SIZE_UNIT (elttype)));
5149 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
5150 xtarget = offset_address (target, pos_rtx,
5151 highest_pow2_factor (position));
5152 xtarget = adjust_address (xtarget, mode, 0);
5153 if (TREE_CODE (value) == CONSTRUCTOR)
5154 store_constructor (value, xtarget, cleared,
5155 bitsize / BITS_PER_UNIT);
5157 store_expr (value, xtarget, 0);
5159 expand_exit_loop_if_false (loop,
5160 build (LT_EXPR, integer_type_node,
5163 expand_increment (build (PREINCREMENT_EXPR,
5165 index, integer_one_node), 0, 0);
5167 emit_label (loop_end);
5170 else if ((index != 0 && ! host_integerp (index, 0))
5171 || ! host_integerp (TYPE_SIZE (elttype), 1))
	      index = ssize_int (i);
5179 index = convert (ssizetype,
5180 fold (build (MINUS_EXPR, index,
5181 TYPE_MIN_VALUE (domain))));
5183 position = size_binop (MULT_EXPR, index,
5185 TYPE_SIZE_UNIT (elttype)));
5186 xtarget = offset_address (target,
5187 expand_expr (position, 0, VOIDmode, 0),
5188 highest_pow2_factor (position));
5189 xtarget = adjust_address (xtarget, mode, 0);
5190 store_expr (value, xtarget, 0);
5195 bitpos = ((tree_low_cst (index, 0) - minelt)
5196 * tree_low_cst (TYPE_SIZE (elttype), 1));
5198 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5200 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
5201 && TREE_CODE (type) == ARRAY_TYPE
5202 && TYPE_NONALIASED_COMPONENT (type))
5204 target = copy_rtx (target);
5205 MEM_KEEP_ALIAS_SET_P (target) = 1;
5208 store_constructor_field (target, bitsize, bitpos, mode, value,
5209 type, cleared, get_alias_set (elttype));
5215 /* Set constructor assignments. */
5216 else if (TREE_CODE (type) == SET_TYPE)
5218 tree elt = CONSTRUCTOR_ELTS (exp);
5219 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
5220 tree domain = TYPE_DOMAIN (type);
5221 tree domain_min, domain_max, bitlength;
      /* The default implementation strategy is to extract the constant
	 parts of the constructor, use that to initialize the target,
	 and then "or" in whatever non-constant ranges we need in addition.

	 If a large set is all zero or all ones, it is
	 probably better to set it using memset (if available) or bzero.
	 Also, if a large set has just a single range, it may also be
	 better to first clear the set (using bzero/memset), and then
	 set the bits we want.  */
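/* Illustrative example (added commentary): for a Pascal-style set

	type t = set of 0..63;
	var  s : t;
	...
	s := [1, 5, 60];

   all members are constant, so get_set_constructor_bits builds the
   bitmap words at compile time and they are stored with plain moves;
   a non-constant range such as [i..j] would instead be set at run time
   through the __setbits library call emitted further below.  */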
5233 /* Check for all zeros. */
5234 if (elt == NULL_TREE && size > 0)
5237 clear_storage (target, GEN_INT (size));
5241 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
5242 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
5243 bitlength = size_binop (PLUS_EXPR,
5244 size_diffop (domain_max, domain_min),
5247 nbits = tree_low_cst (bitlength, 1);
5249 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
5250 are "complicated" (more than one range), initialize (the
5251 constant parts) by copying from a constant. */
5252 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
5253 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
5255 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
5256 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
5257 char *bit_buffer = (char *) alloca (nbits);
5258 HOST_WIDE_INT word = 0;
5259 unsigned int bit_pos = 0;
5260 unsigned int ibit = 0;
5261 unsigned int offset = 0; /* In bytes from beginning of set. */
5263 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
5266 if (bit_buffer[ibit])
5268 if (BYTES_BIG_ENDIAN)
5269 word |= (1 << (set_word_size - 1 - bit_pos));
5271 word |= 1 << bit_pos;
5275 if (bit_pos >= set_word_size || ibit == nbits)
5277 if (word != 0 || ! cleared)
5279 rtx datum = GEN_INT (word);
5282 /* The assumption here is that it is safe to use
5283 XEXP if the set is multi-word, but not if
5284 it's single-word. */
5285 if (GET_CODE (target) == MEM)
5286 to_rtx = adjust_address (target, mode, offset);
5287 else if (offset == 0)
5291 emit_move_insn (to_rtx, datum);
5298 offset += set_word_size / BITS_PER_UNIT;
5303 /* Don't bother clearing storage if the set is all ones. */
5304 if (TREE_CHAIN (elt) != NULL_TREE
	  || (TREE_PURPOSE (elt) == NULL_TREE
	      ? nbits != 1
	      : ( ! host_integerp (TREE_VALUE (elt), 0)
5308 || ! host_integerp (TREE_PURPOSE (elt), 0)
5309 || (tree_low_cst (TREE_VALUE (elt), 0)
5310 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5311 != (HOST_WIDE_INT) nbits))))
5312 clear_storage (target, expr_size (exp));
5314 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5316 /* Start of range of element or NULL. */
5317 tree startbit = TREE_PURPOSE (elt);
5318 /* End of range of element, or element value. */
5319 tree endbit = TREE_VALUE (elt);
5320 HOST_WIDE_INT startb, endb;
5321 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5323 bitlength_rtx = expand_expr (bitlength,
5324 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5326 /* Handle non-range tuple element like [ expr ]. */
5327 if (startbit == NULL_TREE)
5329 startbit = save_expr (endbit);
5333 startbit = convert (sizetype, startbit);
5334 endbit = convert (sizetype, endbit);
5335 if (! integer_zerop (domain_min))
5337 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5338 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5340 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5341 EXPAND_CONST_ADDRESS);
5342 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5343 EXPAND_CONST_ADDRESS);
5349 ((build_qualified_type ((*lang_hooks.types.type_for_mode)
5350 (GET_MODE (target), 0),
5353 emit_move_insn (targetx, target);
5356 else if (GET_CODE (target) == MEM)
5361 /* Optimization: If startbit and endbit are constants divisible
5362 by BITS_PER_UNIT, call memset instead. */
5363 if (TARGET_MEM_FUNCTIONS
5364 && TREE_CODE (startbit) == INTEGER_CST
5365 && TREE_CODE (endbit) == INTEGER_CST
5366 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5367 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5369 emit_library_call (memset_libfunc, LCT_NORMAL,
5371 plus_constant (XEXP (targetx, 0),
5372 startb / BITS_PER_UNIT),
5374 constm1_rtx, TYPE_MODE (integer_type_node),
5375 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5376 TYPE_MODE (sizetype));
5379 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
5380 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
5381 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5382 startbit_rtx, TYPE_MODE (sizetype),
5383 endbit_rtx, TYPE_MODE (sizetype));
5386 emit_move_insn (target, targetx);
5394 /* Store the value of EXP (an expression tree)
5395 into a subfield of TARGET which has mode MODE and occupies
5396 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5397 If MODE is VOIDmode, it means that we are storing into a bit-field.
5399 If VALUE_MODE is VOIDmode, return nothing in particular.
5400 UNSIGNEDP is not used in this case.
5402 Otherwise, return an rtx for the value stored. This rtx
5403 has mode VALUE_MODE if that is convenient to do.
5404 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5406 TYPE is the type of the underlying object,
5408 ALIAS_SET is the alias set for the destination. This value will
5409 (in general) be different from that for TARGET, since TARGET is a
5410 reference to the containing structure. */
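/* Illustrative example (added commentary): for

	struct s { int x : 3; int y : 5; } v;
	...
	v.y = 7;

   MODE is VOIDmode because Y is a bit-field, so on a typical
   little-endian layout the store goes through store_bit_field with
   BITSIZE == 5 and BITPOS == 3; an ordinary aligned field would
   instead take the adjust_address path near the end of this
   function.  */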
static rtx
store_field (target, bitsize, bitpos, mode, exp, value_mode, unsignedp, type,
	     alias_set)
     rtx target;
     HOST_WIDE_INT bitsize;
     HOST_WIDE_INT bitpos;
     enum machine_mode mode;
     tree exp;
     enum machine_mode value_mode;
     int unsignedp;
     tree type;
     int alias_set;
{
  HOST_WIDE_INT width_mask = 0;

  if (TREE_CODE (exp) == ERROR_MARK)
    return const0_rtx;
  /* If we have nothing to store, do nothing unless the expression has
     side-effects.  */
  if (bitsize == 0)
    return expand_expr (exp, const0_rtx, VOIDmode, 0);
  else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
    width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5437 /* If we are storing into an unaligned field of an aligned union that is
5438 in a register, we may have the mode of TARGET being an integer mode but
5439 MODE == BLKmode. In that case, get an aligned object whose size and
5440 alignment are the same as TARGET and store TARGET into it (we can avoid
5441 the store if the field being stored is the entire width of TARGET). Then
5442 call ourselves recursively to store the field into a BLKmode version of
5443 that object. Finally, load from the object into TARGET. This is not
5444 very efficient in general, but should only be slightly more expensive
5445 than the otherwise-required unaligned accesses. Perhaps this can be
5446 cleaned up later. */
5449 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5453 (build_qualified_type (type, TYPE_QUALS (type) | TYPE_QUAL_CONST),
5455 rtx blk_object = adjust_address (object, BLKmode, 0);
5457 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5458 emit_move_insn (object, target);
5460 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5463 emit_move_insn (target, object);
5465 /* We want to return the BLKmode version of the data. */
5469 if (GET_CODE (target) == CONCAT)
5471 /* We're storing into a struct containing a single __complex. */
5475 return store_expr (exp, target, 0);
5478 /* If the structure is in a register or if the component
5479 is a bit field, we cannot use addressing to access it.
5480 Use bit-field techniques or SUBREG to store in it. */
5482 if (mode == VOIDmode
5483 || (mode != BLKmode && ! direct_store[(int) mode]
5484 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5485 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5486 || GET_CODE (target) == REG
5487 || GET_CODE (target) == SUBREG
5488 /* If the field isn't aligned enough to store as an ordinary memref,
5489 store it as a bit field. */
5490 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
5491 && (MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode)
5492 || bitpos % GET_MODE_ALIGNMENT (mode)))
5493 /* If the RHS and field are a constant size and the size of the
5494 RHS isn't the same size as the bitfield, we must use bitfield
5497 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5498 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5500 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
      /* If BITSIZE is narrower than the size of the type of EXP
	 we will be narrowing TEMP.  Normally, what's wanted are the
	 low-order bits.  However, if EXP's type is a record and this is
	 a big-endian machine, we want the upper BITSIZE bits.  */
5506 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5507 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5508 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5509 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5510 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
      /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
	 MODE.  */
      if (mode != VOIDmode && mode != BLKmode
5517 && mode != TYPE_MODE (TREE_TYPE (exp)))
5518 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5520 /* If the modes of TARGET and TEMP are both BLKmode, both
5521 must be in memory and BITPOS must be aligned on a byte
5522 boundary. If so, we simply do a block copy. */
5523 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5525 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5526 || bitpos % BITS_PER_UNIT != 0)
5529 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5530 emit_block_move (target, temp,
5531 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5535 return value_mode == VOIDmode ? const0_rtx : target;
      /* Store the value in the bitfield.  */
      store_bit_field (target, bitsize, bitpos, mode, temp,
		       int_size_in_bytes (type));
5542 if (value_mode != VOIDmode)
      /* The caller wants an rtx for the value.
	 If possible, avoid refetching from the bitfield itself.  */
      if (width_mask != 0
	  && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5550 enum machine_mode tmode;
5552 tmode = GET_MODE (temp);
5553 if (tmode == VOIDmode)
5557 return expand_and (tmode, temp,
5558 gen_int_mode (width_mask, tmode),
5561 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5562 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5563 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5566 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5567 NULL_RTX, value_mode, VOIDmode,
5568 int_size_in_bytes (type));
5574 rtx addr = XEXP (target, 0);
5575 rtx to_rtx = target;
5577 /* If a value is wanted, it must be the lhs;
5578 so make the address stable for multiple use. */
5580 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5581 && ! CONSTANT_ADDRESS_P (addr)
5582 /* A frame-pointer reference is already stable. */
5583 && ! (GET_CODE (addr) == PLUS
5584 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5585 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5586 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5587 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5589 /* Now build a reference to just the desired component. */
5591 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5593 if (to_rtx == target)
5594 to_rtx = copy_rtx (to_rtx);
5596 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5597 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5598 set_mem_alias_set (to_rtx, alias_set);
5600 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5604 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5605 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5606 codes and find the ultimate containing object, which we return.
5608 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5609 bit position, and *PUNSIGNEDP to the signedness of the field.
5610 If the position of the field is variable, we store a tree
5611 giving the variable offset (in units) in *POFFSET.
5612 This offset is in addition to the bit position.
5613 If the position is not variable, we store 0 in *POFFSET.
5615 If any of the extraction expressions is volatile,
5616 we store 1 in *PVOLATILEP. Otherwise we don't change that.
   If the field is a bit-field, *PMODE is set to VOIDmode.  Otherwise, it
   is a mode that can be used to access the field.  In that case, *PBITSIZE
   is redundant.
5622 If the field describes a variable-sized object, *PMODE is set to
5623 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5624 this case, but the address of the object can be found. */
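/* Illustrative example (added commentary): for the reference A.B[I].C
   we peel the COMPONENT_REF and ARRAY_REF wrappers and return the decl
   for A; the constant offsets of B and C accumulate into *PBITPOS,
   while the variable part I * sizeof (element) comes back as a tree in
   *POFFSET.  */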
tree
get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
		     punsignedp, pvolatilep)
     tree exp;
     HOST_WIDE_INT *pbitsize;
     HOST_WIDE_INT *pbitpos;
     tree *poffset;
     enum machine_mode *pmode;
     int *punsignedp;
     int *pvolatilep;
{
  tree size_tree = 0;
5638 enum machine_mode mode = VOIDmode;
5639 tree offset = size_zero_node;
5640 tree bit_offset = bitsize_zero_node;
5641 tree placeholder_ptr = 0;
5644 /* First get the mode, signedness, and size. We do this from just the
5645 outermost expression. */
5646 if (TREE_CODE (exp) == COMPONENT_REF)
5648 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5649 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5650 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5652 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5654 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5656 size_tree = TREE_OPERAND (exp, 1);
5657 *punsignedp = TREE_UNSIGNED (exp);
5661 mode = TYPE_MODE (TREE_TYPE (exp));
5662 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5664 if (mode == BLKmode)
5665 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5667 *pbitsize = GET_MODE_BITSIZE (mode);
      if (! host_integerp (size_tree, 1))
	mode = BLKmode, *pbitsize = -1;
      else
	*pbitsize = tree_low_cst (size_tree, 1);
5678 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5679 and find the ultimate containing object. */
5682 if (TREE_CODE (exp) == BIT_FIELD_REF)
5683 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5684 else if (TREE_CODE (exp) == COMPONENT_REF)
5686 tree field = TREE_OPERAND (exp, 1);
5687 tree this_offset = DECL_FIELD_OFFSET (field);
5689 /* If this field hasn't been filled in yet, don't go
5690 past it. This should only happen when folding expressions
5691 made during type construction. */
5692 if (this_offset == 0)
5694 else if (! TREE_CONSTANT (this_offset)
5695 && contains_placeholder_p (this_offset))
5696 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5698 offset = size_binop (PLUS_EXPR, offset, this_offset);
5699 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5700 DECL_FIELD_BIT_OFFSET (field));
5702 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5705 else if (TREE_CODE (exp) == ARRAY_REF
5706 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5708 tree index = TREE_OPERAND (exp, 1);
5709 tree array = TREE_OPERAND (exp, 0);
5710 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5711 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5712 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
	  /* We assume all arrays have sizes that are a multiple of a byte.
	     First subtract the lower bound, if any, in the type of the
	     index, then convert to sizetype and multiply by the size of the
	     array element.  */
5718 if (low_bound != 0 && ! integer_zerop (low_bound))
	    index = fold (build (MINUS_EXPR, TREE_TYPE (index),
				 index, low_bound));
	  /* If the index has a self-referential type, pass it to a
	     WITH_RECORD_EXPR; if the component size is self-referential,
	     pass our component to one.  */
5725 if (! TREE_CONSTANT (index)
5726 && contains_placeholder_p (index))
5727 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5728 if (! TREE_CONSTANT (unit_size)
5729 && contains_placeholder_p (unit_size))
5730 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5732 offset = size_binop (PLUS_EXPR, offset,
5733 size_binop (MULT_EXPR,
5734 convert (sizetype, index),
5738 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5740 tree new = find_placeholder (exp, &placeholder_ptr);
5742 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5743 We might have been called from tree optimization where we
5744 haven't set up an object yet. */
5752 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5753 && TREE_CODE (exp) != VIEW_CONVERT_EXPR
5754 && ! ((TREE_CODE (exp) == NOP_EXPR
5755 || TREE_CODE (exp) == CONVERT_EXPR)
5756 && (TYPE_MODE (TREE_TYPE (exp))
5757 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5760 /* If any reference in the chain is volatile, the effect is volatile. */
5761 if (TREE_THIS_VOLATILE (exp))
5764 exp = TREE_OPERAND (exp, 0);
5767 /* If OFFSET is constant, see if we can return the whole thing as a
5768 constant bit position. Otherwise, split it up. */
  if (host_integerp (offset, 0)
      && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
				 bitsize_unit_node))
      && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
      && host_integerp (tem, 0))
    *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
  else
    *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5782 /* Return 1 if T is an expression that get_inner_reference handles. */
int
handled_component_p (t)
     tree t;
{
  switch (TREE_CODE (t))
5793 case ARRAY_RANGE_REF:
5794 case NON_LVALUE_EXPR:
5795 case VIEW_CONVERT_EXPR:
5800 return (TYPE_MODE (TREE_TYPE (t))
5801 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5808 /* Given an rtx VALUE that may contain additions and multiplications, return
5809 an equivalent value that just refers to a register, memory, or constant.
5810 This is done by generating instructions to perform the arithmetic and
5811 returning a pseudo-register containing the value.
5813 The returned value may be a REG, SUBREG, MEM or constant. */
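/* Illustrative example (added commentary): given a value such as

	(plus (mult (reg) (const_int 4)) (const_int 8))

   force_operand emits an explicit multiply (or shift) and an add, and
   returns the pseudo-register holding the sum, so that later passes
   see a plain REG instead of embedded arithmetic.  */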
rtx
force_operand (value, target)
     rtx value, target;
{
  rtx op1, op2;
  /* Use subtarget as the target for operand 0 of a binary operation.  */
5821 rtx subtarget = get_subtarget (target);
5822 enum rtx_code code = GET_CODE (value);
5824 /* Check for a PIC address load. */
5825 if ((code == PLUS || code == MINUS)
5826 && XEXP (value, 0) == pic_offset_table_rtx
5827 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5828 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5829 || GET_CODE (XEXP (value, 1)) == CONST))
5832 subtarget = gen_reg_rtx (GET_MODE (value));
5833 emit_move_insn (subtarget, value);
5837 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5840 target = gen_reg_rtx (GET_MODE (value));
5841 convert_move (target, force_operand (XEXP (value, 0), NULL),
5842 code == ZERO_EXTEND);
5846 if (GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
      op2 = XEXP (value, 1);
      if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
	subtarget = 0;
      if (code == MINUS && GET_CODE (op2) == CONST_INT)
	{
	  code = PLUS;
	  op2 = negate_rtx (GET_MODE (value), op2);
	}
5857 /* Check for an addition with OP2 a constant integer and our first
5858 operand a PLUS of a virtual register and something else. In that
5859 case, we want to emit the sum of the virtual register and the
5860 constant first and then add the other value. This allows virtual
5861 register instantiation to simply modify the constant rather than
5862 creating another one around this addition. */
5863 if (code == PLUS && GET_CODE (op2) == CONST_INT
5864 && GET_CODE (XEXP (value, 0)) == PLUS
5865 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5866 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5867 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5869 rtx temp = expand_simple_binop (GET_MODE (value), code,
5870 XEXP (XEXP (value, 0), 0), op2,
5871 subtarget, 0, OPTAB_LIB_WIDEN);
5872 return expand_simple_binop (GET_MODE (value), code, temp,
5873 force_operand (XEXP (XEXP (value,
5875 target, 0, OPTAB_LIB_WIDEN);
5878 op1 = force_operand (XEXP (value, 0), subtarget);
5879 op2 = force_operand (op2, NULL_RTX);
5883 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5885 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5886 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5887 target, 1, OPTAB_LIB_WIDEN);
5889 return expand_divmod (0,
5890 FLOAT_MODE_P (GET_MODE (value))
5891 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5892 GET_MODE (value), op1, op2, target, 0);
5895 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5899 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5903 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5907 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5908 target, 0, OPTAB_LIB_WIDEN);
5911 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5912 target, 1, OPTAB_LIB_WIDEN);
5915 if (GET_RTX_CLASS (code) == '1')
5917 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5918 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5921 #ifdef INSN_SCHEDULING
  /* On machines that have insn scheduling, we want all memory references
     to be explicit, so we need to deal with such paradoxical SUBREGs.  */
5924 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
5925 && (GET_MODE_SIZE (GET_MODE (value))
5926 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5928 = simplify_gen_subreg (GET_MODE (value),
5929 force_reg (GET_MODE (SUBREG_REG (value)),
5930 force_operand (SUBREG_REG (value),
5932 GET_MODE (SUBREG_REG (value)),
5933 SUBREG_BYTE (value));
5939 /* Subroutine of expand_expr: return nonzero iff there is no way that
5940 EXP can reference X, which is being modified. TOP_P is nonzero if this
5941 call is going to be used to determine whether we need a temporary
5942 for EXP, as opposed to a recursive call to this function.
5944 It is always safe for this routine to return zero since it merely
5945 searches for optimization opportunities. */
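/* Illustrative example (added commentary): when expanding something like

	a = (b = a) + 1;

   A cannot serve as scratch space for the right-hand side, because
   that subexpression still reads A; safe_from_p would return 0 here,
   which merely forces a temporary and therefore only costs an extra
   copy.  */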
static int
safe_from_p (x, exp, top_p)
     rtx x;
     tree exp;
     int top_p;
{
  rtx exp_rtl = 0;
  int i, nops;
  static tree save_expr_list;

  if (x == 0
5958 /* If EXP has varying size, we MUST use a target since we currently
5959 have no way of allocating temporaries of variable size
5960 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5961 So we assume here that something at a higher level has prevented a
5962 clash. This is somewhat bogus, but the best we can do. Only
5963 do this when X is BLKmode and when we are at the top level. */
5964 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5965 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5966 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5967 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5968 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5970 && GET_MODE (x) == BLKmode)
5971 /* If X is in the outgoing argument area, it is always safe. */
5972 || (GET_CODE (x) == MEM
5973 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5974 || (GET_CODE (XEXP (x, 0)) == PLUS
	      && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
    return 1;
5978 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5979 find the underlying pseudo. */
  if (GET_CODE (x) == SUBREG)
    {
      x = SUBREG_REG (x);
      if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
	return 0;
    }
5987 /* A SAVE_EXPR might appear many times in the expression passed to the
5988 top-level safe_from_p call, and if it has a complex subexpression,
5989 examining it multiple times could result in a combinatorial explosion.
5990 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5991 with optimization took about 28 minutes to compile -- even though it was
5992 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5993 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5994 we have processed. Note that the only test of top_p was above. */
6003 rtn = safe_from_p (x, exp, 0);
6005 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
6006 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
6011 /* Now look at our tree code and possibly recurse. */
6012 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6015 exp_rtl = DECL_RTL_IF_SET (exp);
6022 if (TREE_CODE (exp) == TREE_LIST)
6023 return ((TREE_VALUE (exp) == 0
6024 || safe_from_p (x, TREE_VALUE (exp), 0))
6025 && (TREE_CHAIN (exp) == 0
6026 || safe_from_p (x, TREE_CHAIN (exp), 0)));
6027 else if (TREE_CODE (exp) == ERROR_MARK)
6028 return 1; /* An already-visited SAVE_EXPR? */
6033 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6037 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
6038 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
6042 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6043 the expression. If it is set, we conflict iff we are that rtx or
6044 both are in memory. Otherwise, we check all operands of the
6045 expression recursively. */
6047 switch (TREE_CODE (exp))
6050 /* If the operand is static or we are static, we can't conflict.
6051 Likewise if we don't conflict with the operand at all. */
6052 if (staticp (TREE_OPERAND (exp, 0))
6053 || TREE_STATIC (exp)
6054 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
      /* Otherwise, the only way this can conflict is if we are taking
	 the address of a DECL and that address is part of X, which is
	 very rare.  */
6060 exp = TREE_OPERAND (exp, 0);
6063 if (!DECL_RTL_SET_P (exp)
6064 || GET_CODE (DECL_RTL (exp)) != MEM)
6067 exp_rtl = XEXP (DECL_RTL (exp), 0);
      if (GET_CODE (x) == MEM
	  && alias_sets_conflict_p (MEM_ALIAS_SET (x),
				    get_alias_set (exp)))
	return 0;
      break;
      /* Assume that the call will clobber all hard registers and
	 all of memory.  */
      if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
	  || GET_CODE (x) == MEM)
	return 0;
      break;
      /* If a sequence exists, we would have to scan every instruction
	 in the sequence to see if it was safe.  This is probably not
	 worthwhile.  */
      if (RTL_EXPR_SEQUENCE (exp))
	return 0;
6093 exp_rtl = RTL_EXPR_RTL (exp);
6096 case WITH_CLEANUP_EXPR:
6097 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
6100 case CLEANUP_POINT_EXPR:
6101 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6104 exp_rtl = SAVE_EXPR_RTL (exp);
      /* If we've already scanned this, don't do it again.  Otherwise,
	 show we've scanned it and record for clearing the flag if we're
	 going on.  */
6111 if (TREE_PRIVATE (exp))
6114 TREE_PRIVATE (exp) = 1;
6115 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6117 TREE_PRIVATE (exp) = 0;
6121 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
6125 /* The only operand we look at is operand 1. The rest aren't
6126 part of the expression. */
6127 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
6129 case METHOD_CALL_EXPR:
6130 /* This takes an rtx argument, but shouldn't appear here. */
6137 /* If we have an rtx, we do not need to scan our operands. */
6141 nops = first_rtl_op (TREE_CODE (exp));
6142 for (i = 0; i < nops; i++)
6143 if (TREE_OPERAND (exp, i) != 0
6144 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6147 /* If this is a language-specific tree code, it may require
6148 special handling. */
6149 if ((unsigned int) TREE_CODE (exp)
6150 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
6151 && !(*lang_hooks.safe_from_p) (x, exp))
  /* If we have an rtl, find any enclosed object.  Then see if we conflict
     with it.  */
6159 if (GET_CODE (exp_rtl) == SUBREG)
6161 exp_rtl = SUBREG_REG (exp_rtl);
6162 if (GET_CODE (exp_rtl) == REG
6163 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6167 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6168 are memory and they conflict. */
6169 return ! (rtx_equal_p (x, exp_rtl)
6170 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
6171 && true_dependence (exp_rtl, VOIDmode, x,
6172 rtx_addr_varies_p)));
6175 /* If we reach here, it is safe. */
6179 /* Subroutine of expand_expr: return rtx if EXP is a
6180 variable or parameter; else return 0. */
6187 switch (TREE_CODE (exp))
6191 return DECL_RTL (exp);
6197 #ifdef MAX_INTEGER_COMPUTATION_MODE
void
check_max_integer_computation_mode (exp)
     tree exp;
{
  enum tree_code code;
6204 enum machine_mode mode;
  /* Strip any NOPs that don't change the mode.  */
  STRIP_NOPS (exp);
  code = TREE_CODE (exp);
6210 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
6211 if (code == NOP_EXPR
6212 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
6215 /* First check the type of the overall operation. We need only look at
6216 unary, binary and relational operations. */
6217 if (TREE_CODE_CLASS (code) == '1'
6218 || TREE_CODE_CLASS (code) == '2'
6219 || TREE_CODE_CLASS (code) == '<')
6221 mode = TYPE_MODE (TREE_TYPE (exp));
6222 if (GET_MODE_CLASS (mode) == MODE_INT
6223 && mode > MAX_INTEGER_COMPUTATION_MODE)
6224 internal_error ("unsupported wide integer operation");
6227 /* Check operand of a unary op. */
6228 if (TREE_CODE_CLASS (code) == '1')
6230 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6231 if (GET_MODE_CLASS (mode) == MODE_INT
6232 && mode > MAX_INTEGER_COMPUTATION_MODE)
6233 internal_error ("unsupported wide integer operation");
6236 /* Check operands of a binary/comparison op. */
6237 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
6239 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6240 if (GET_MODE_CLASS (mode) == MODE_INT
6241 && mode > MAX_INTEGER_COMPUTATION_MODE)
6242 internal_error ("unsupported wide integer operation");
6244 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
6245 if (GET_MODE_CLASS (mode) == MODE_INT
6246 && mode > MAX_INTEGER_COMPUTATION_MODE)
6247 internal_error ("unsupported wide integer operation");
6252 /* Return the highest power of two that EXP is known to be a multiple of.
6253 This is used in updating alignment of MEMs in array references. */
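/* Illustrative example (added commentary): for the offset expression
   I * 12 + 4, the MULT case yields 1 * 4 == 4 (4 being the largest
   power of two dividing 12), the constant 4 also yields 4, and the
   PLUS case takes MIN (4, 4) == 4, so a MEM addressed by this value
   may be marked 4-byte aligned.  */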
6255 static HOST_WIDE_INT
highest_pow2_factor (exp)
     tree exp;
{
  HOST_WIDE_INT c0, c1;
6261 switch (TREE_CODE (exp))
6264 /* We can find the lowest bit that's a one. If the low
6265 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
	 We need to handle this case since we can find it in a COND_EXPR,
	 a MIN_EXPR, or a MAX_EXPR.  If the constant overflows, we have an
	 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
	 later ICE.  */
6270 if (TREE_CONSTANT_OVERFLOW (exp))
6271 return BIGGEST_ALIGNMENT;
6274 /* Note: tree_low_cst is intentionally not used here,
6275 we don't care about the upper bits. */
6276 c0 = TREE_INT_CST_LOW (exp);
6278 return c0 ? c0 : BIGGEST_ALIGNMENT;
6282 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6283 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6284 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6285 return MIN (c0, c1);
    case MULT_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      return c0 * c1;
    case ROUND_DIV_EXPR:  case TRUNC_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
6294 if (integer_pow2p (TREE_OPERAND (exp, 1))
6295 && host_integerp (TREE_OPERAND (exp, 1), 1))
6297 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6298 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6299 return MAX (1, c0 / c1);
6303 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6304 case SAVE_EXPR: case WITH_RECORD_EXPR:
6305 return highest_pow2_factor (TREE_OPERAND (exp, 0));
    case COMPOUND_EXPR:
      return highest_pow2_factor (TREE_OPERAND (exp, 1));
    case COND_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6312 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6313 return MIN (c0, c1);
6322 /* Similar, except that it is known that the expression must be a multiple
6323 of the alignment of TYPE. */
6325 static HOST_WIDE_INT
highest_pow2_factor_for_type (type, exp)
     tree type, exp;
{
  HOST_WIDE_INT type_align, factor;
6332 factor = highest_pow2_factor (exp);
6333 type_align = TYPE_ALIGN (type) / BITS_PER_UNIT;
6334 return MAX (factor, type_align);
6337 /* Return an object on the placeholder list that matches EXP, a
6338 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
6339 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
6340 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
6341 is a location which initially points to a starting location in the
6342 placeholder list (zero means start of the list) and where a pointer into
6343 the placeholder list at which the object is found is placed. */
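/* Illustrative example (added commentary): PLACEHOLDER_EXPRs arise in
   self-referential types, e.g. an Ada record whose array component is
   bounded by another field of the same record.  When a size or offset
   expression containing such a placeholder is finally evaluated for a
   concrete object, that object has been pushed on placeholder_list and
   is what this function digs out.  */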
tree
find_placeholder (exp, plist)
     tree exp;
     tree *plist;
{
  tree type = TREE_TYPE (exp);
6351 tree placeholder_expr;
6353 for (placeholder_expr
6354 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
6355 placeholder_expr != 0;
6356 placeholder_expr = TREE_CHAIN (placeholder_expr))
6358 tree need_type = TYPE_MAIN_VARIANT (type);
6361 /* Find the outermost reference that is of the type we want. If none,
6362 see if any object has a type that is a pointer to the type we
6363 want. */
6364 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6365 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6366 || TREE_CODE (elt) == COND_EXPR)
6367 ? TREE_OPERAND (elt, 1)
6368 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6369 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6370 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6371 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6372 ? TREE_OPERAND (elt, 0) : 0))
6373 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6376 *plist = placeholder_expr;
6380 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6382 = ((TREE_CODE (elt) == COMPOUND_EXPR
6383 || TREE_CODE (elt) == COND_EXPR)
6384 ? TREE_OPERAND (elt, 1)
6385 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6386 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6387 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6388 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6389 ? TREE_OPERAND (elt, 0) : 0))
6390 if (POINTER_TYPE_P (TREE_TYPE (elt))
6391 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6395 *plist = placeholder_expr;
6396 return build1 (INDIRECT_REF, need_type, elt);
6403 /* expand_expr: generate code for computing expression EXP.
6404 An rtx for the computed value is returned. The value is never null.
6405 In the case of a void EXP, const0_rtx is returned.
6407 The value may be stored in TARGET if TARGET is nonzero.
6408 TARGET is just a suggestion; callers must assume that
6409 the rtx returned may not be the same as TARGET.
6411 If TARGET is CONST0_RTX, it means that the value will be ignored.
6413 If TMODE is not VOIDmode, it suggests generating the
6414 result in mode TMODE. But this is done only when convenient.
6415 Otherwise, TMODE is ignored and the value generated in its natural mode.
6416 TMODE is just a suggestion; callers must assume that
6417 the rtx returned may not have mode TMODE.
6419 Note that TARGET may have neither TMODE nor MODE. In that case, it
6420 probably will not be used.
6422 If MODIFIER is EXPAND_SUM then when EXP is an addition
6423 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6424 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6425 products as above, or REG or MEM, or constant.
6426 Ordinarily in such cases we would output mul or add instructions
6427 and then return a pseudo reg containing the sum.
6429 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6430 it also marks a label as absolutely required (it can't be dead).
6431 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6432 This is used for outputting expressions used in initializers.
6434 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6435 with a constant address even if that address is not normally legitimate.
6436 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6438 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6439 a call parameter. Such targets require special care as we haven't yet
6440 marked TARGET so that it's safe from being trashed by libcalls. We
6441 don't want to use TARGET for anything but the final result;
6442 intermediate values must go elsewhere. Additionally, calls to
6443 emit_block_move will be flagged with BLOCK_OP_CALL_PARM. */
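/* A minimal caller sketch (illustrative only, not part of the original
   source); the returned rtx, not TARGET, carries the value:

     rtx temp = expand_expr (exp, target, tmode, EXPAND_NORMAL);
     if (temp != target)
       emit_move_insn (target, temp);  */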
6446 expand_expr (exp, target, tmode, modifier)
6449 enum machine_mode tmode;
6450 enum expand_modifier modifier;
6453 tree type = TREE_TYPE (exp);
6454 int unsignedp = TREE_UNSIGNED (type);
6455 enum machine_mode mode;
6456 enum tree_code code = TREE_CODE (exp);
6458 rtx subtarget, original_target;
6462 /* Handle ERROR_MARK before anybody tries to access its type. */
6463 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6465 op0 = CONST0_RTX (tmode);
6471 mode = TYPE_MODE (type);
6472 /* Use subtarget as the target for operand 0 of a binary operation. */
6473 subtarget = get_subtarget (target);
6474 original_target = target;
6475 ignore = (target == const0_rtx
6476 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6477 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6478 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6479 && TREE_CODE (type) == VOID_TYPE));
6481 /* If we are going to ignore this result, we need only do something
6482 if there is a side-effect somewhere in the expression. If there
6483 is, short-circuit the most common cases here. Note that we must
6484 not call expand_expr with anything but const0_rtx in case this
6485 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6489 if (! TREE_SIDE_EFFECTS (exp))
6492 /* Ensure we reference a volatile object even if value is ignored, but
6493 don't do this if all we are doing is taking its address. */
6494 if (TREE_THIS_VOLATILE (exp)
6495 && TREE_CODE (exp) != FUNCTION_DECL
6496 && mode != VOIDmode && mode != BLKmode
6497 && modifier != EXPAND_CONST_ADDRESS)
6499 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6500 if (GET_CODE (temp) == MEM)
6501 temp = copy_to_reg (temp);
6505 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6506 || code == INDIRECT_REF || code == BUFFER_REF)
6507 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6510 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6511 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6513 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6514 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6517 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6518 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6519 /* If the second operand has no side effects, just evaluate
6520 the first. */
6521 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6523 else if (code == BIT_FIELD_REF)
6525 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6526 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6527 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6534 #ifdef MAX_INTEGER_COMPUTATION_MODE
6535 /* Only check stuff here if the mode we want is different from the mode
6536 of the expression; if it's the same, check_max_integer_computation_mode
6537 will handle it. Do we really need to check this stuff at all? */
6539 if (target
6540 && GET_MODE (target) != mode
6541 && TREE_CODE (exp) != INTEGER_CST
6542 && TREE_CODE (exp) != PARM_DECL
6543 && TREE_CODE (exp) != ARRAY_REF
6544 && TREE_CODE (exp) != ARRAY_RANGE_REF
6545 && TREE_CODE (exp) != COMPONENT_REF
6546 && TREE_CODE (exp) != BIT_FIELD_REF
6547 && TREE_CODE (exp) != INDIRECT_REF
6548 && TREE_CODE (exp) != CALL_EXPR
6549 && TREE_CODE (exp) != VAR_DECL
6550 && TREE_CODE (exp) != RTL_EXPR)
6552 enum machine_mode mode = GET_MODE (target);
6554 if (GET_MODE_CLASS (mode) == MODE_INT
6555 && mode > MAX_INTEGER_COMPUTATION_MODE)
6556 internal_error ("unsupported wide integer operation");
6560 && TREE_CODE (exp) != INTEGER_CST
6561 && TREE_CODE (exp) != PARM_DECL
6562 && TREE_CODE (exp) != ARRAY_REF
6563 && TREE_CODE (exp) != ARRAY_RANGE_REF
6564 && TREE_CODE (exp) != COMPONENT_REF
6565 && TREE_CODE (exp) != BIT_FIELD_REF
6566 && TREE_CODE (exp) != INDIRECT_REF
6567 && TREE_CODE (exp) != VAR_DECL
6568 && TREE_CODE (exp) != CALL_EXPR
6569 && TREE_CODE (exp) != RTL_EXPR
6570 && GET_MODE_CLASS (tmode) == MODE_INT
6571 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6572 internal_error ("unsupported wide integer operation");
6574 check_max_integer_computation_mode (exp);
6577 /* If will do cse, generate all results into pseudo registers
6578 since 1) that allows cse to find more things
6579 and 2) otherwise cse could produce an insn the machine
6580 cannot support. An exception is a CONSTRUCTOR into a multi-word
6581 MEM: that's much more likely to be most efficient into the MEM.
6582 Another is a CALL_EXPR which must return in memory. */
6584 if (! cse_not_expected && mode != BLKmode && target
6585 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6586 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
6587 && ! (code == CALL_EXPR && aggregate_value_p (exp)))
6594 tree function = decl_function_context (exp);
6595 /* Handle using a label in a containing function. */
6596 if (function != current_function_decl
6597 && function != inline_function_decl && function != 0)
6599 struct function *p = find_function_data (function);
6600 p->expr->x_forced_labels
6601 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6602 p->expr->x_forced_labels);
6606 if (modifier == EXPAND_INITIALIZER)
6607 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6612 temp = gen_rtx_MEM (FUNCTION_MODE,
6613 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6614 if (function != current_function_decl
6615 && function != inline_function_decl && function != 0)
6616 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6621 if (!DECL_RTL_SET_P (exp))
6623 error_with_decl (exp, "prior parameter's size depends on `%s'");
6624 return CONST0_RTX (mode);
6627 /* ... fall through ... */
6630 /* If a static var's type was incomplete when the decl was written,
6631 but the type is complete now, lay out the decl now. */
6632 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6633 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6635 rtx value = DECL_RTL_IF_SET (exp);
6637 layout_decl (exp, 0);
6639 /* If the RTL was already set, update its mode and memory
6640 attributes. */
6643 PUT_MODE (value, DECL_MODE (exp));
6644 SET_DECL_RTL (exp, 0);
6645 set_mem_attributes (value, exp, 1);
6646 SET_DECL_RTL (exp, value);
6650 /* ... fall through ... */
6654 if (DECL_RTL (exp) == 0)
6657 /* Ensure variable marked as used even if it doesn't go through
6658 a parser. If it hasn't been used yet, write out an external
6659 definition. */
6660 if (! TREE_USED (exp))
6662 assemble_external (exp);
6663 TREE_USED (exp) = 1;
6666 /* Show we haven't gotten RTL for this yet. */
6669 /* Handle variables inherited from containing functions. */
6670 context = decl_function_context (exp);
6672 /* We treat inline_function_decl as an alias for the current function
6673 because that is the inline function whose vars, types, etc.
6674 are being merged into the current function.
6675 See expand_inline_function. */
6677 if (context != 0 && context != current_function_decl
6678 && context != inline_function_decl
6679 /* If var is static, we don't need a static chain to access it. */
6680 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6681 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6685 /* Mark as non-local and addressable. */
6686 DECL_NONLOCAL (exp) = 1;
6687 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6689 (*lang_hooks.mark_addressable) (exp);
6690 if (GET_CODE (DECL_RTL (exp)) != MEM)
6692 addr = XEXP (DECL_RTL (exp), 0);
6693 if (GET_CODE (addr) == MEM)
6695 = replace_equiv_address (addr,
6696 fix_lexical_addr (XEXP (addr, 0), exp));
6698 addr = fix_lexical_addr (addr, exp);
6700 temp = replace_equiv_address (DECL_RTL (exp), addr);
6703 /* This is the case of an array whose size is to be determined
6704 from its initializer, while the initializer is still being parsed.
6705 See expand_decl. */
6707 else if (GET_CODE (DECL_RTL (exp)) == MEM
6708 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6709 temp = validize_mem (DECL_RTL (exp));
6711 /* If DECL_RTL is memory, we are in the normal case and either
6712 the address is not valid or it is not a register and -fforce-addr
6713 is specified, get the address into a register. */
6715 else if (GET_CODE (DECL_RTL (exp)) == MEM
6716 && modifier != EXPAND_CONST_ADDRESS
6717 && modifier != EXPAND_SUM
6718 && modifier != EXPAND_INITIALIZER
6719 && (! memory_address_p (DECL_MODE (exp),
6720 XEXP (DECL_RTL (exp), 0))
6722 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6723 temp = replace_equiv_address (DECL_RTL (exp),
6724 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6726 /* If we got something, return it. But first, set the alignment
6727 if the address is a register. */
6730 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6731 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6736 /* If the mode of DECL_RTL does not match that of the decl, it
6737 must be a promoted value. We return a SUBREG of the wanted mode,
6738 but mark it so that we know that it was already extended. */
6740 if (GET_CODE (DECL_RTL (exp)) == REG
6741 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6743 /* Get the signedness used for this variable. Ensure we get the
6744 same mode we got when the variable was declared. */
6745 if (GET_MODE (DECL_RTL (exp))
6746 != promote_mode (type, DECL_MODE (exp), &unsignedp,
6747 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6750 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6751 SUBREG_PROMOTED_VAR_P (temp) = 1;
6752 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6756 return DECL_RTL (exp);
6759 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6760 TREE_INT_CST_HIGH (exp), mode);
6762 /* ??? If overflow is set, fold will have done an incomplete job,
6763 which can result in (plus xx (const_int 0)), which can get
6764 simplified by validate_replace_rtx during virtual register
6765 instantiation, which can result in unrecognizable insns.
6766 Avoid this by forcing all overflows into registers. */
6767 if (TREE_CONSTANT_OVERFLOW (exp)
6768 && modifier != EXPAND_INITIALIZER)
6769 temp = force_reg (mode, temp);
6774 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6777 /* If optimized, generate immediate CONST_DOUBLE
6778 which will be turned into memory by reload if necessary.
6780 We used to force a register so that loop.c could see it. But
6781 this does not allow gen_* patterns to perform optimizations with
6782 the constants. It also produces two insns in cases like "x = 1.0;".
6783 On most machines, floating-point constants are not permitted in
6784 many insns, so we'd end up copying it to a register in any case.
6786 Now, we do the copying in expand_binop, if appropriate. */
6787 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6788 TYPE_MODE (TREE_TYPE (exp)));
6792 if (! TREE_CST_RTL (exp))
6793 output_constant_def (exp, 1);
6795 /* TREE_CST_RTL probably contains a constant address.
6796 On RISC machines where a constant address isn't valid,
6797 make some insns to get that address into a register. */
6798 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6799 && modifier != EXPAND_CONST_ADDRESS
6800 && modifier != EXPAND_INITIALIZER
6801 && modifier != EXPAND_SUM
6802 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6804 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6805 return replace_equiv_address (TREE_CST_RTL (exp),
6806 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6807 return TREE_CST_RTL (exp);
6809 case EXPR_WITH_FILE_LOCATION:
6812 const char *saved_input_filename = input_filename;
6813 int saved_lineno = lineno;
6814 input_filename = EXPR_WFL_FILENAME (exp);
6815 lineno = EXPR_WFL_LINENO (exp);
6816 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6817 emit_line_note (input_filename, lineno);
6818 /* Possibly avoid switching back and forth here. */
6819 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6820 input_filename = saved_input_filename;
6821 lineno = saved_lineno;
6826 context = decl_function_context (exp);
6828 /* If this SAVE_EXPR was at global context, assume we are an
6829 initialization function and move it into our context. */
6831 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6833 /* We treat inline_function_decl as an alias for the current function
6834 because that is the inline function whose vars, types, etc.
6835 are being merged into the current function.
6836 See expand_inline_function. */
6837 if (context == current_function_decl || context == inline_function_decl)
6840 /* If this is non-local, handle it. */
6843 /* The following call just exists to abort if the context is
6844 not of a containing function. */
6845 find_function_data (context);
6847 temp = SAVE_EXPR_RTL (exp);
6848 if (temp && GET_CODE (temp) == REG)
6850 put_var_into_stack (exp);
6851 temp = SAVE_EXPR_RTL (exp);
6853 if (temp == 0 || GET_CODE (temp) != MEM)
6856 replace_equiv_address (temp,
6857 fix_lexical_addr (XEXP (temp, 0), exp));
6859 if (SAVE_EXPR_RTL (exp) == 0)
6861 if (mode == VOIDmode)
6864 temp = assign_temp (build_qualified_type (type,
6866 | TYPE_QUAL_CONST)),
6869 SAVE_EXPR_RTL (exp) = temp;
6870 if (!optimize && GET_CODE (temp) == REG)
6871 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6874 /* If the mode of TEMP does not match that of the expression, it
6875 must be a promoted value. We pass store_expr a SUBREG of the
6876 wanted mode but mark it so that we know that it was already
6877 extended. Note that `unsignedp' was modified above in
6878 this case. */
6880 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6882 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6883 SUBREG_PROMOTED_VAR_P (temp) = 1;
6884 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6887 if (temp == const0_rtx)
6888 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6890 store_expr (TREE_OPERAND (exp, 0), temp,
6891 modifier == EXPAND_STACK_PARM ? 2 : 0);
6893 TREE_USED (exp) = 1;
6896 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6897 must be a promoted value. We return a SUBREG of the wanted mode,
6898 but mark it so that we know that it was already extended. */
6900 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6901 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6903 /* Compute the signedness and make the proper SUBREG. */
6904 promote_mode (type, mode, &unsignedp, 0);
6905 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6906 SUBREG_PROMOTED_VAR_P (temp) = 1;
6907 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6911 return SAVE_EXPR_RTL (exp);
6916 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6917 TREE_OPERAND (exp, 0)
6918 = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
6922 case PLACEHOLDER_EXPR:
6924 tree old_list = placeholder_list;
6925 tree placeholder_expr = 0;
6927 exp = find_placeholder (exp, &placeholder_expr);
6931 placeholder_list = TREE_CHAIN (placeholder_expr);
6932 temp = expand_expr (exp, original_target, tmode, modifier);
6933 placeholder_list = old_list;
6937 case WITH_RECORD_EXPR:
6938 /* Put the object on the placeholder list, expand our first operand,
6939 and pop the list. */
6940 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6942 target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
6944 placeholder_list = TREE_CHAIN (placeholder_list);
6948 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6949 expand_goto (TREE_OPERAND (exp, 0));
6951 expand_computed_goto (TREE_OPERAND (exp, 0));
6955 expand_exit_loop_if_false (NULL,
6956 invert_truthvalue (TREE_OPERAND (exp, 0)));
6959 case LABELED_BLOCK_EXPR:
6960 if (LABELED_BLOCK_BODY (exp))
6961 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
6962 /* Should perhaps use expand_label, but this is simpler and safer. */
6963 do_pending_stack_adjust ();
6964 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6967 case EXIT_BLOCK_EXPR:
6968 if (EXIT_BLOCK_RETURN (exp))
6969 sorry ("returned value in block_exit_expr");
6970 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6975 expand_start_loop (1);
6976 expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
6984 tree vars = TREE_OPERAND (exp, 0);
6986 /* Need to open a binding contour here because
6987 if there are any cleanups they must be contained here. */
6988 expand_start_bindings (2);
6990 /* Mark the corresponding BLOCK for output in its proper place. */
6991 if (TREE_OPERAND (exp, 2) != 0
6992 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6993 (*lang_hooks.decls.insert_block) (TREE_OPERAND (exp, 2));
6995 /* If VARS have not yet been expanded, expand them now. */
6998 if (!DECL_RTL_SET_P (vars))
7000 expand_decl_init (vars);
7001 vars = TREE_CHAIN (vars);
7004 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
7006 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
7012 if (RTL_EXPR_SEQUENCE (exp))
7014 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
7016 emit_insn (RTL_EXPR_SEQUENCE (exp));
7017 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
7019 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
7020 free_temps_for_rtl_expr (exp);
7021 return RTL_EXPR_RTL (exp);
7024 /* If we don't need the result, just ensure we evaluate any
7025 subexpressions. */
7030 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
7031 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
7036 /* All elts simple constants => refer to a constant in memory. But
7037 if this is a non-BLKmode mode, let it store a field at a time
7038 since that should make a CONST_INT or CONST_DOUBLE when we
7039 fold. Likewise, if we have a target we can use, it is best to
7040 store directly into the target unless the type is large enough
7041 that memcpy will be used. If we are making an initializer and
7042 all operands are constant, put it in memory as well.
7044 FIXME: Avoid trying to fill vector constructors piece-meal.
7045 Output them with output_constant_def below unless we're sure
7046 they're zeros. This should go away when vector initializers
7047 are treated like VECTOR_CST instead of arrays. */
7049 else if ((TREE_STATIC (exp)
7050 && ((mode == BLKmode
7051 && ! (target != 0 && safe_from_p (target, exp, 1)))
7052 || TREE_ADDRESSABLE (exp)
7053 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7054 && (! MOVE_BY_PIECES_P
7055 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7057 && ((TREE_CODE (type) == VECTOR_TYPE
7058 && !is_zeros_p (exp))
7059 || ! mostly_zeros_p (exp)))))
7060 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
7062 rtx constructor = output_constant_def (exp, 1);
7064 if (modifier != EXPAND_CONST_ADDRESS
7065 && modifier != EXPAND_INITIALIZER
7066 && modifier != EXPAND_SUM)
7067 constructor = validize_mem (constructor);
7073 /* Handle calls that pass values in multiple non-contiguous
7074 locations. The Irix 6 ABI has examples of this. */
7075 if (target == 0 || ! safe_from_p (target, exp, 1)
7076 || GET_CODE (target) == PARALLEL
7077 || modifier == EXPAND_STACK_PARM)
7079 = assign_temp (build_qualified_type (type,
7081 | (TREE_READONLY (exp)
7082 * TYPE_QUAL_CONST))),
7083 0, TREE_ADDRESSABLE (exp), 1);
7085 store_constructor (exp, target, 0, int_expr_size (exp));
7091 tree exp1 = TREE_OPERAND (exp, 0);
7093 tree string = string_constant (exp1, &index);
7095 /* Try to optimize reads from const strings. */
7097 && TREE_CODE (string) == STRING_CST
7098 && TREE_CODE (index) == INTEGER_CST
7099 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
7100 && GET_MODE_CLASS (mode) == MODE_INT
7101 && GET_MODE_SIZE (mode) == 1
7102 && modifier != EXPAND_WRITE)
7103 return gen_int_mode (TREE_STRING_POINTER (string)
7104 [TREE_INT_CST_LOW (index)], mode);
7106 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
7107 op0 = memory_address (mode, op0);
7108 temp = gen_rtx_MEM (mode, op0);
7109 set_mem_attributes (temp, exp, 0);
7111 /* If we are writing to this object and its type is a record with
7112 readonly fields, we must mark it as readonly so it will
7113 conflict with readonly references to those fields. */
7114 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
7115 RTX_UNCHANGING_P (temp) = 1;
7121 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
7125 tree array = TREE_OPERAND (exp, 0);
7126 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
7127 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
7128 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
7131 /* Optimize the special-case of a zero lower bound.
7133 We convert the low_bound to sizetype to avoid some problems
7134 with constant folding. (E.g. suppose the lower bound is 1,
7135 and its mode is QI. Without the conversion, (ARRAY
7136 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
7137 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
7139 if (! integer_zerop (low_bound))
7140 index = size_diffop (index, convert (sizetype, low_bound));
7142 /* Fold an expression like: "foo"[2].
7143 This is not done in fold so it won't happen inside &.
7144 Don't fold if this is for wide characters since it's too
7145 difficult to do correctly and this is a very rare case. */
7147 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
7148 && TREE_CODE (array) == STRING_CST
7149 && TREE_CODE (index) == INTEGER_CST
7150 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
7151 && GET_MODE_CLASS (mode) == MODE_INT
7152 && GET_MODE_SIZE (mode) == 1)
7153 return gen_int_mode (TREE_STRING_POINTER (array)
7154 [TREE_INT_CST_LOW (index)], mode);
7156 /* If this is a constant index into a constant array,
7157 just get the value from the array. Handle both the cases when
7158 we have an explicit constructor and when our operand is a variable
7159 that was declared const. */
7161 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
7162 && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
7163 && TREE_CODE (index) == INTEGER_CST
7164 && 0 > compare_tree_int (index,
7165 list_length (CONSTRUCTOR_ELTS
7166 (TREE_OPERAND (exp, 0)))))
7170 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7171 i = TREE_INT_CST_LOW (index);
7172 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
7176 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
7180 else if (optimize >= 1
7181 && modifier != EXPAND_CONST_ADDRESS
7182 && modifier != EXPAND_INITIALIZER
7183 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7184 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7185 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
7187 if (TREE_CODE (index) == INTEGER_CST)
7189 tree init = DECL_INITIAL (array);
7191 if (TREE_CODE (init) == CONSTRUCTOR)
7195 for (elem = CONSTRUCTOR_ELTS (init);
7197 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
7198 elem = TREE_CHAIN (elem))
7201 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
7202 return expand_expr (fold (TREE_VALUE (elem)), target,
7205 else if (TREE_CODE (init) == STRING_CST
7206 && 0 > compare_tree_int (index,
7207 TREE_STRING_LENGTH (init)))
7209 tree type = TREE_TYPE (TREE_TYPE (init));
7210 enum machine_mode mode = TYPE_MODE (type);
7212 if (GET_MODE_CLASS (mode) == MODE_INT
7213 && GET_MODE_SIZE (mode) == 1)
7214 return gen_int_mode (TREE_STRING_POINTER (init)
7215 [TREE_INT_CST_LOW (index)], mode);
7224 case ARRAY_RANGE_REF:
7225 /* If the operand is a CONSTRUCTOR, we can just extract the
7226 appropriate field if it is present. Don't do this if we have
7227 already written the data since we want to refer to that copy
7228 and varasm.c assumes that's what we'll do. */
7229 if (code == COMPONENT_REF
7230 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
7231 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
7235 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
7236 elt = TREE_CHAIN (elt))
7237 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
7238 /* We can normally use the value of the field in the
7239 CONSTRUCTOR. However, if this is a bitfield in
7240 an integral mode that we can fit in a HOST_WIDE_INT,
7241 we must mask only the number of bits in the bitfield,
7242 since this is done implicitly by the constructor. If
7243 the bitfield does not meet either of those conditions,
7244 we can't do this optimization. */
7245 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
7246 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
7248 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
7249 <= HOST_BITS_PER_WIDE_INT))))
7251 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
7252 && modifier == EXPAND_STACK_PARM)
7254 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
7255 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
7257 HOST_WIDE_INT bitsize
7258 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
7259 enum machine_mode imode
7260 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
7262 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
7264 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7265 op0 = expand_and (imode, op0, op1, target);
7270 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
7273 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7275 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7285 enum machine_mode mode1;
7286 HOST_WIDE_INT bitsize, bitpos;
7289 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7290 &mode1, &unsignedp, &volatilep);
7293 /* If we got back the original object, something is wrong. Perhaps
7294 we are evaluating an expression too early. In any event, don't
7295 infinitely recurse. */
7299 /* If TEM's type is a union of variable size, pass TARGET to the inner
7300 computation, since it will need a temporary and TARGET is known
7301 to have to do. This occurs in unchecked conversion in Ada. */
7305 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7306 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7308 && modifier != EXPAND_STACK_PARM
7309 ? target : NULL_RTX),
7311 (modifier == EXPAND_INITIALIZER
7312 || modifier == EXPAND_CONST_ADDRESS
7313 || modifier == EXPAND_STACK_PARM)
7314 ? modifier : EXPAND_NORMAL);
7316 /* If this is a constant, put it into a register if it is a
7317 legitimate constant and OFFSET is 0; otherwise put it into memory. */
7318 if (CONSTANT_P (op0))
7320 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7321 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7323 op0 = force_reg (mode, op0);
7325 op0 = validize_mem (force_const_mem (mode, op0));
7330 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7333 /* If this object is in a register, put it into memory.
7334 This case can't occur in C, but can in Ada if we have
7335 unchecked conversion of an expression from a scalar type to
7336 an array or record type. */
7337 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7338 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
7340 /* If the operand is a SAVE_EXPR, we can deal with this by
7341 forcing the SAVE_EXPR into memory. */
7342 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7344 put_var_into_stack (TREE_OPERAND (exp, 0));
7345 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7350 = build_qualified_type (TREE_TYPE (tem),
7351 (TYPE_QUALS (TREE_TYPE (tem))
7352 | TYPE_QUAL_CONST));
7353 rtx memloc = assign_temp (nt, 1, 1, 1);
7355 emit_move_insn (memloc, op0);
7360 if (GET_CODE (op0) != MEM)
7363 #ifdef POINTERS_EXTEND_UNSIGNED
7364 if (GET_MODE (offset_rtx) != Pmode)
7365 offset_rtx = convert_memory_address (Pmode, offset_rtx);
7367 if (GET_MODE (offset_rtx) != ptr_mode)
7368 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7371 /* A constant address in OP0 can have VOIDmode; we must not
7372 call force_reg in that case, so avoid it. */
7373 if (GET_CODE (op0) == MEM
7374 && GET_MODE (op0) == BLKmode
7375 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7377 && (bitpos % bitsize) == 0
7378 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7379 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7381 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7385 op0 = offset_address (op0, offset_rtx,
7386 highest_pow2_factor (offset));
7389 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7390 record its alignment as BIGGEST_ALIGNMENT. */
7391 if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
7392 && is_aligning_offset (offset, tem))
7393 set_mem_align (op0, BIGGEST_ALIGNMENT);
7395 /* Don't forget about volatility even if this is a bitfield. */
7396 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7398 if (op0 == orig_op0)
7399 op0 = copy_rtx (op0);
7401 MEM_VOLATILE_P (op0) = 1;
7404 /* The following code doesn't handle CONCAT.
7405 Assume only bitpos == 0 can be used for CONCAT, due to
7406 one-element arrays having the same mode as their element. */
7407 if (GET_CODE (op0) == CONCAT)
7409 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
7414 /* In cases where an aligned union has an unaligned object
7415 as a field, we might be extracting a BLKmode value from
7416 an integer-mode (e.g., SImode) object. Handle this case
7417 by doing the extract into an object as wide as the field
7418 (which we know to be the width of a basic mode), then
7419 storing into memory, and changing the mode to BLKmode. */
7420 if (mode1 == VOIDmode
7421 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7422 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7423 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7424 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7425 && modifier != EXPAND_CONST_ADDRESS
7426 && modifier != EXPAND_INITIALIZER)
7427 /* If the field isn't aligned enough to fetch as a memref,
7428 fetch it as a bit field. */
7429 || (mode1 != BLKmode
7430 && SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))
7431 && ((TYPE_ALIGN (TREE_TYPE (tem))
7432 < GET_MODE_ALIGNMENT (mode))
7433 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
7434 /* If the type and the field are a constant size and the
7435 size of the type isn't the same size as the bitfield,
7436 we must use bitfield operations. */
7438 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7440 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7443 enum machine_mode ext_mode = mode;
7445 if (ext_mode == BLKmode
7446 && ! (target != 0 && GET_CODE (op0) == MEM
7447 && GET_CODE (target) == MEM
7448 && bitpos % BITS_PER_UNIT == 0))
7449 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7451 if (ext_mode == BLKmode)
7453 /* In this case, BITPOS must start at a byte boundary and
7454 TARGET, if specified, must be a MEM. */
7455 if (GET_CODE (op0) != MEM
7456 || (target != 0 && GET_CODE (target) != MEM)
7457 || bitpos % BITS_PER_UNIT != 0)
7460 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
7462 target = assign_temp (type, 0, 1, 1);
7464 emit_block_move (target, op0,
7465 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7467 (modifier == EXPAND_STACK_PARM
7468 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7473 op0 = validize_mem (op0);
7475 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7476 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7478 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7479 (modifier == EXPAND_STACK_PARM
7480 ? NULL_RTX : target),
7482 int_size_in_bytes (TREE_TYPE (tem)));
7484 /* If the result is a record type and BITSIZE is narrower than
7485 the mode of OP0, an integral mode, and this is a big endian
7486 machine, we must put the field into the high-order bits. */
7487 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7488 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7489 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7490 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7491 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7495 if (mode == BLKmode)
7497 rtx new = assign_temp (build_qualified_type
7498 ((*lang_hooks.types.type_for_mode)
7500 TYPE_QUAL_CONST), 0, 1, 1);
7502 emit_move_insn (new, op0);
7503 op0 = copy_rtx (new);
7504 PUT_MODE (op0, BLKmode);
7505 set_mem_attributes (op0, exp, 1);
7511 /* If the result is BLKmode, use that to access the object
7512 now as well. */
7513 if (mode == BLKmode)
7514 mode1 = BLKmode;
7516 /* Get a reference to just this component. */
7517 if (modifier == EXPAND_CONST_ADDRESS
7518 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7519 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7521 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7523 if (op0 == orig_op0)
7524 op0 = copy_rtx (op0);
7526 set_mem_attributes (op0, exp, 0);
7527 if (GET_CODE (XEXP (op0, 0)) == REG)
7528 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7530 MEM_VOLATILE_P (op0) |= volatilep;
7531 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7532 || modifier == EXPAND_CONST_ADDRESS
7533 || modifier == EXPAND_INITIALIZER)
7535 else if (target == 0)
7536 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7538 convert_move (target, op0, unsignedp);
7544 rtx insn, before = get_last_insn (), vtbl_ref;
7546 /* Evaluate the interior expression. */
7547 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7550 /* Get or create an instruction off which to hang a note. */
7551 if (REG_P (subtarget))
7554 insn = get_last_insn ();
7557 if (! INSN_P (insn))
7558 insn = prev_nonnote_insn (insn);
7562 target = gen_reg_rtx (GET_MODE (subtarget));
7563 insn = emit_move_insn (target, subtarget);
7566 /* Collect the data for the note. */
7567 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7568 vtbl_ref = plus_constant (vtbl_ref,
7569 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7570 /* Discard the initial CONST that was added. */
7571 vtbl_ref = XEXP (vtbl_ref, 0);
7574 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7579 /* Intended for a reference to a buffer of a file-object in Pascal.
7580 But it's not certain that a special tree code will really be
7581 necessary for these. INDIRECT_REF might work for them. */
7587 /* Pascal set IN expression.
7590 rlo = set_low - (set_low%bits_per_word);
7591 the_word = set [ (index - rlo)/bits_per_word ];
7592 bit_index = index % bits_per_word;
7593 bitmask = 1 << bit_index;
7594 return !!(the_word & bitmask); */
7596 tree set = TREE_OPERAND (exp, 0);
7597 tree index = TREE_OPERAND (exp, 1);
7598 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7599 tree set_type = TREE_TYPE (set);
7600 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7601 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7602 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7603 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7604 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7605 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7606 rtx setaddr = XEXP (setval, 0);
7607 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7609 rtx diff, quo, rem, addr, bit, result;
7611 /* If domain is empty, answer is no. Likewise if index is constant
7612 and out of bounds. */
7613 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7614 && TREE_CODE (set_low_bound) == INTEGER_CST
7615 && tree_int_cst_lt (set_high_bound, set_low_bound))
7616 || (TREE_CODE (index) == INTEGER_CST
7617 && TREE_CODE (set_low_bound) == INTEGER_CST
7618 && tree_int_cst_lt (index, set_low_bound))
7619 || (TREE_CODE (set_high_bound) == INTEGER_CST
7620 && TREE_CODE (index) == INTEGER_CST
7621 && tree_int_cst_lt (set_high_bound, index))))
7625 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7627 /* If we get here, we have to generate the code for both cases
7628 (in range and out of range). */
7630 op0 = gen_label_rtx ();
7631 op1 = gen_label_rtx ();
7633 if (! (GET_CODE (index_val) == CONST_INT
7634 && GET_CODE (lo_r) == CONST_INT))
7635 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7636 GET_MODE (index_val), iunsignedp, op1);
7638 if (! (GET_CODE (index_val) == CONST_INT
7639 && GET_CODE (hi_r) == CONST_INT))
7640 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7641 GET_MODE (index_val), iunsignedp, op1);
7643 /* Calculate the element number of bit zero in the first word
7644 of the set. */
7645 if (GET_CODE (lo_r) == CONST_INT)
7646 rlow = GEN_INT (INTVAL (lo_r)
7647 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7649 rlow = expand_binop (index_mode, and_optab, lo_r,
7650 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7651 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7653 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7654 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7656 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7657 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7658 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7659 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7661 addr = memory_address (byte_mode,
7662 expand_binop (index_mode, add_optab, diff,
7663 setaddr, NULL_RTX, iunsignedp,
7666 /* Extract the bit we want to examine. */
7667 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7668 gen_rtx_MEM (byte_mode, addr),
7669 make_tree (TREE_TYPE (index), rem),
7671 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7672 GET_MODE (target) == byte_mode ? target : 0,
7673 1, OPTAB_LIB_WIDEN);
7675 if (result != target)
7676 convert_move (target, result, 1);
7678 /* Output the code to handle the out-of-range case. */
7681 emit_move_insn (target, const0_rtx);
7686 case WITH_CLEANUP_EXPR:
7687 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7689 WITH_CLEANUP_EXPR_RTL (exp)
7690 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7691 expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
7692 CLEANUP_EH_ONLY (exp));
7694 /* That's it for this cleanup. */
7695 TREE_OPERAND (exp, 1) = 0;
7697 return WITH_CLEANUP_EXPR_RTL (exp);
7699 case CLEANUP_POINT_EXPR:
7701 /* Start a new binding layer that will keep track of all cleanup
7702 actions to be performed. */
7703 expand_start_bindings (2);
7705 target_temp_slot_level = temp_slot_level;
7707 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7708 /* If we're going to use this value, load it up now. */
7710 op0 = force_not_mem (op0);
7711 preserve_temp_slots (op0);
7712 expand_end_bindings (NULL_TREE, 0, 0);
7717 /* Check for a built-in function. */
7718 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7719 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7721 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7723 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7724 == BUILT_IN_FRONTEND)
7725 return (*lang_hooks.expand_expr) (exp, original_target,
7728 return expand_builtin (exp, target, subtarget, tmode, ignore);
7731 return expand_call (exp, target, ignore);
7733 case NON_LVALUE_EXPR:
7736 case REFERENCE_EXPR:
7737 if (TREE_OPERAND (exp, 0) == error_mark_node)
7740 if (TREE_CODE (type) == UNION_TYPE)
7742 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7744 /* If both input and output are BLKmode, this conversion isn't doing
7745 anything except possibly changing memory attributes. */
7746 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7748 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7751 result = copy_rtx (result);
7752 set_mem_attributes (result, exp, 0);
7757 target = assign_temp (type, 0, 1, 1);
7759 if (GET_CODE (target) == MEM)
7760 /* Store data into beginning of memory target. */
7761 store_expr (TREE_OPERAND (exp, 0),
7762 adjust_address (target, TYPE_MODE (valtype), 0),
7763 modifier == EXPAND_STACK_PARM ? 2 : 0);
7765 else if (GET_CODE (target) == REG)
7766 /* Store this field into a union of the proper type. */
7767 store_field (target,
7768 MIN ((int_size_in_bytes (TREE_TYPE
7769 (TREE_OPERAND (exp, 0)))
7771 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7772 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7773 VOIDmode, 0, type, 0);
7777 /* Return the entire union. */
7781 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7783 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7786 /* If the signedness of the conversion differs and OP0 is
7787 a promoted SUBREG, clear that indication since we now
7788 have to do the proper extension. */
7789 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7790 && GET_CODE (op0) == SUBREG)
7791 SUBREG_PROMOTED_VAR_P (op0) = 0;
7796 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7797 if (GET_MODE (op0) == mode)
7800 /* If OP0 is a constant, just convert it into the proper mode. */
7801 if (CONSTANT_P (op0))
7803 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7804 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7806 if (modifier == EXPAND_INITIALIZER)
7807 return simplify_gen_subreg (mode, op0, inner_mode,
7808 subreg_lowpart_offset (mode,
7811 return convert_modes (mode, inner_mode, op0,
7812 TREE_UNSIGNED (inner_type));
7815 if (modifier == EXPAND_INITIALIZER)
7816 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7820 convert_to_mode (mode, op0,
7821 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7823 convert_move (target, op0,
7824 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7827 case VIEW_CONVERT_EXPR:
7828 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7830 /* If the input and output modes are both the same, we are done.
7831 Otherwise, if neither mode is BLKmode and both are within a word, we
7832 can use gen_lowpart. If neither is true, make sure the operand is
7833 in memory and convert the MEM to the new mode. */
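/* Illustrative example (not in the original source): a
   VIEW_CONVERT_EXPR that reinterprets a 4-byte float as a 4-byte
   integer satisfies the second test and goes through gen_lowpart,
   while a conversion between BLKmode aggregates is done in memory. */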
7834 if (TYPE_MODE (type) == GET_MODE (op0))
7836 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7837 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7838 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7839 op0 = gen_lowpart (TYPE_MODE (type), op0);
7840 else if (GET_CODE (op0) != MEM)
7842 /* If the operand is not a MEM, force it into memory. Since we
7843 are going to be changing the mode of the MEM, don't call
7844 force_const_mem for constants because we don't allow pool
7845 constants to change mode. */
7846 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7848 if (TREE_ADDRESSABLE (exp))
7851 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7853 = assign_stack_temp_for_type
7854 (TYPE_MODE (inner_type),
7855 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7857 emit_move_insn (target, op0);
7861 /* At this point, OP0 is in the correct mode. If the output type is such
7862 that the operand is known to be aligned, indicate that it is.
7863 Otherwise, we need only be concerned about alignment for non-BLKmode
7864 results. */
7865 if (GET_CODE (op0) == MEM)
7867 op0 = copy_rtx (op0);
7869 if (TYPE_ALIGN_OK (type))
7870 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7871 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7872 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7874 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7875 HOST_WIDE_INT temp_size
7876 = MAX (int_size_in_bytes (inner_type),
7877 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7878 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7879 temp_size, 0, type);
7880 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7882 if (TREE_ADDRESSABLE (exp))
7885 if (GET_MODE (op0) == BLKmode)
7886 emit_block_move (new_with_op0_mode, op0,
7887 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7888 (modifier == EXPAND_STACK_PARM
7889 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7891 emit_move_insn (new_with_op0_mode, op0);
7896 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7902 this_optab = ! unsignedp && flag_trapv
7903 && (GET_MODE_CLASS (mode) == MODE_INT)
7904 ? addv_optab : add_optab;
7906 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7907 something else, make sure we add the register to the constant and
7908 then to the other thing. This case can occur during strength
7909 reduction and doing it this way will produce better code if the
7910 frame pointer or argument pointer is eliminated.
7912 fold-const.c will ensure that the constant is always in the inner
7913 PLUS_EXPR, so the only case we need to do anything about is if
7914 sp, ap, or fp is our second argument, in which case we must swap
7915 the innermost first argument and our second argument. */
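/* Illustrative example (not in the original source): for
   (PLUS_EXPR (PLUS_EXPR X 4) FP) the swap below yields
   (PLUS_EXPR (PLUS_EXPR FP 4) X), so FP and the constant can be
   combined once the frame pointer is eliminated. */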
7917 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7918 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7919 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7920 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7921 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7922 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7924 tree t = TREE_OPERAND (exp, 1);
7926 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7927 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7930 /* If the result is to be ptr_mode and we are adding an integer to
7931 something, we might be forming a constant. So try to use
7932 plus_constant. If it produces a sum and we can't accept it,
7933 use force_operand. This allows P = &ARR[const] to generate
7935 efficient code on machines where a SYMBOL_REF is not a valid
7936 address.
7937 If this is an EXPAND_SUM call, always return the sum. */
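/* Illustrative example (not in the original source): for
   P = &ARR[5] with 4-byte elements, this path lets plus_constant fold
   the address to (plus (symbol_ref ARR) (const_int 20)) instead of
   emitting an add insn. */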
7938 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7939 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7941 if (modifier == EXPAND_STACK_PARM)
7943 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7944 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7945 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7949 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7951 /* Use immed_double_const to ensure that the constant is
7952 truncated according to the mode of OP1, then sign extended
7953 to a HOST_WIDE_INT. Using the constant directly can result
7954 in non-canonical RTL in a 64x32 cross compile. */
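/* Illustrative example (not in the original source): on a 64-bit host
   targeting 32 bits, the unsigned SImode constant 0xffffffff must
   become the canonical CONST_INT -1; immed_double_const performs the
   required truncation and sign extension. */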
7956 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7958 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7959 op1 = plus_constant (op1, INTVAL (constant_part));
7960 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7961 op1 = force_operand (op1, target);
7965 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7966 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7967 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7971 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7972 (modifier == EXPAND_INITIALIZER
7973 ? EXPAND_INITIALIZER : EXPAND_SUM));
7974 if (! CONSTANT_P (op0))
7976 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7977 VOIDmode, modifier);
7978 /* Don't go to both_summands if modifier
7979 says it's not right to return a PLUS. */
7980 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7984 /* Use immed_double_const to ensure that the constant is
7985 truncated according to the mode of OP1, then sign extended
7986 to a HOST_WIDE_INT. Using the constant directly can result
7987 in non-canonical RTL in a 64x32 cross compile. */
7989 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7991 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7992 op0 = plus_constant (op0, INTVAL (constant_part));
7993 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7994 op0 = force_operand (op0, target);
7999 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8002 /* No sense saving up arithmetic to be done
8003 if it's all in the wrong mode to form part of an address.
8004 And force_operand won't know whether to sign-extend or
8005 zero-extend. */
8006 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8007 || mode != ptr_mode)
8009 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8010 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8011 if (op0 == const0_rtx)
8013 if (op1 == const0_rtx)
8018 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
8019 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
8021 /* We come here from MINUS_EXPR when the second operand is a
8022 constant. */
8023 both_summands:
8024 /* Make sure any term that's a sum with a constant comes last. */
8025 if (GET_CODE (op0) == PLUS
8026 && CONSTANT_P (XEXP (op0, 1)))
8032 /* If adding to a sum including a constant,
8033 associate it to put the constant outside. */
8034 if (GET_CODE (op1) == PLUS
8035 && CONSTANT_P (XEXP (op1, 1)))
8037 rtx constant_term = const0_rtx;
8039 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
8042 /* Ensure that MULT comes first if there is one. */
8043 else if (GET_CODE (op0) == MULT)
8044 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
8046 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
8048 /* Let's also eliminate constants from op0 if possible. */
8049 op0 = eliminate_constant_term (op0, &constant_term);
8051 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
8052 their sum should be a constant. Form it into OP1, since the
8053 result we want will then be OP0 + OP1. */
8055 temp = simplify_binary_operation (PLUS, mode, constant_term,
8060 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
8063 /* Put a constant term last and put a multiplication first. */
8064 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
8065 temp = op1, op1 = op0, op0 = temp;
8067 temp = simplify_binary_operation (PLUS, mode, op0, op1);
8068 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
8071 /* For initializers, we are allowed to return a MINUS of two
8072 symbolic constants. Here we handle all cases when both operands
8073 are constant. */
8074 /* Handle difference of two symbolic constants,
8075 for the sake of an initializer. */
8076 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8077 && really_constant_p (TREE_OPERAND (exp, 0))
8078 && really_constant_p (TREE_OPERAND (exp, 1)))
8080 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
8082 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
8085 /* If the last operand is a CONST_INT, use plus_constant of
8086 the negated constant. Else make the MINUS. */
8087 if (GET_CODE (op1) == CONST_INT)
8088 return plus_constant (op0, - INTVAL (op1));
8090 return gen_rtx_MINUS (mode, op0, op1);
8093 this_optab = ! unsignedp && flag_trapv
8094 && (GET_MODE_CLASS(mode) == MODE_INT)
8095 ? subv_optab : sub_optab;
8097 /* No sense saving up arithmetic to be done
8098 if it's all in the wrong mode to form part of an address.
8099 And force_operand won't know whether to sign-extend or
8100 zero-extend. */
8101 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8102 || mode != ptr_mode)
8105 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8108 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
8109 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
8111 /* Convert A - const to A + (-const). */
8112 if (GET_CODE (op1) == CONST_INT)
8114 op1 = negate_rtx (mode, op1);
8121 /* If first operand is constant, swap them.
8122 Thus the following special case checks need only
8123 check the second operand. */
8124 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
8126 tree t1 = TREE_OPERAND (exp, 0);
8127 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
8128 TREE_OPERAND (exp, 1) = t1;
8131 /* Attempt to return something suitable for generating an
8132 indexed address, for machines that support that. */
8134 if (modifier == EXPAND_SUM && mode == ptr_mode
8135 && host_integerp (TREE_OPERAND (exp, 1), 0))
8137 tree exp1 = TREE_OPERAND (exp, 1);
8139 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8142 /* If we knew for certain that this is arithmetic for an array
8143 reference, and we knew the bounds of the array, then we could
8144 apply the distributive law across (PLUS X C) for constant C.
8145 Without such knowledge, we risk overflowing the computation
8146 when both X and C are large, but X+C isn't. */
8147 /* ??? Could perhaps special-case EXP being unsigned and C being
8148 positive. In that case we are certain that X+C is no smaller
8149 than X and so the transformed expression will overflow iff the
8150 original would have. */
8152 if (GET_CODE (op0) != REG)
8153 op0 = force_operand (op0, NULL_RTX);
8154 if (GET_CODE (op0) != REG)
8155 op0 = copy_to_mode_reg (mode, op0);
8157 return gen_rtx_MULT (mode, op0,
8158 gen_int_mode (tree_low_cst (exp1, 0),
8159 TYPE_MODE (TREE_TYPE (exp1))));
8162 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8165 if (modifier == EXPAND_STACK_PARM)
8168 /* Check for multiplying things that have been extended
8169 from a narrower type. If this machine supports multiplying
8170 in that narrower type with a result in the desired type,
8171 do it that way, and avoid the explicit type-conversion. */
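/* Illustrative example (not in the original source): on a target
   providing a mulhisi3 pattern, (int) A * (int) B with HImode A and B
   can use smul_widen_optab directly instead of extending both operands
   and doing a full SImode multiply. */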
8172 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
8173 && TREE_CODE (type) == INTEGER_TYPE
8174 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8175 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8176 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8177 && int_fits_type_p (TREE_OPERAND (exp, 1),
8178 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8179 /* Don't use a widening multiply if a shift will do. */
8180 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8181 > HOST_BITS_PER_WIDE_INT)
8182 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
8184 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8185 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8187 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
8188 /* If both operands are extended, they must either both
8189 be zero-extended or both be sign-extended. */
8190 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8192 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
8194 enum machine_mode innermode
8195 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
8196 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8197 ? smul_widen_optab : umul_widen_optab);
8198 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8199 ? umul_widen_optab : smul_widen_optab);
8200 if (mode == GET_MODE_WIDER_MODE (innermode))
8202 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
8204 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8205 NULL_RTX, VOIDmode, 0);
8206 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8207 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8210 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8211 NULL_RTX, VOIDmode, 0);
8214 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
8215 && innermode == word_mode)
8218 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8219 NULL_RTX, VOIDmode, 0);
8220 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8221 op1 = convert_modes (innermode, mode,
8222 expand_expr (TREE_OPERAND (exp, 1),
8223 NULL_RTX, VOIDmode, 0),
8226 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8227 NULL_RTX, VOIDmode, 0);
8228 temp = expand_binop (mode, other_optab, op0, op1, target,
8229 unsignedp, OPTAB_LIB_WIDEN);
8230 htem = expand_mult_highpart_adjust (innermode,
8231 gen_highpart (innermode, temp),
8233 gen_highpart (innermode, temp),
8235 emit_move_insn (gen_highpart (innermode, temp), htem);
8240 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8241 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8242 return expand_mult (mode, op0, op1, target, unsignedp);
8244 case TRUNC_DIV_EXPR:
8245 case FLOOR_DIV_EXPR:
8247 case ROUND_DIV_EXPR:
8248 case EXACT_DIV_EXPR:
8249 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8251 if (modifier == EXPAND_STACK_PARM)
/* Possible optimization: compute the dividend with EXPAND_SUM and
   then, if the divisor is constant, optimize the case where some
   terms of the dividend have coefficients divisible by it.  */
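/* For instance (illustrative only): in (x*8 + y*4) / 4 every term of
   the dividend has a coefficient divisible by the constant divisor,
   so the whole expression could be rewritten as x*2 + y with no
   division emitted at all.  */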
8256 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8257 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8258 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
/* Emit a/b as a*(1/b).  Later we may be able to CSE the reciprocal,
   saving an expensive divide.  If not, combine will rebuild the
   original computation.  */
8264 if (flag_unsafe_math_optimizations && optimize && !optimize_size
8265 && TREE_CODE (type) == REAL_TYPE
8266 && !real_onep (TREE_OPERAND (exp, 0)))
8267 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
8268 build (RDIV_EXPR, type,
8269 build_real (type, dconst1),
8270 TREE_OPERAND (exp, 1))),
8271 target, tmode, modifier);
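/* Illustrative sketch (not part of the original source): given

   double f (double a, double c, double b)
   {
     return a / b + c / b;
   }

   the rewrite above yields a * (1/b) + c * (1/b), and CSE can then
   compute 1/b once, trading two divides for one divide and two
   multiplies.  This is only done under -funsafe-math-optimizations
   because it can change rounding.  */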
8272 this_optab = sdiv_optab;
8275 case TRUNC_MOD_EXPR:
8276 case FLOOR_MOD_EXPR:
8278 case ROUND_MOD_EXPR:
8279 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8281 if (modifier == EXPAND_STACK_PARM)
8283 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8284 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8285 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8287 case FIX_ROUND_EXPR:
8288 case FIX_FLOOR_EXPR:
8290 abort (); /* Not used for C. */
8292 case FIX_TRUNC_EXPR:
8293 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8294 if (target == 0 || modifier == EXPAND_STACK_PARM)
8295 target = gen_reg_rtx (mode);
8296 expand_fix (target, op0, unsignedp);
8300 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8301 if (target == 0 || modifier == EXPAND_STACK_PARM)
8302 target = gen_reg_rtx (mode);
8303 /* expand_float can't figure out what to do if FROM has VOIDmode.
8304 So give it the correct mode. With -O, cse will optimize this. */
8305 if (GET_MODE (op0) == VOIDmode)
8306 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8308 expand_float (target, op0,
8309 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8313 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8314 if (modifier == EXPAND_STACK_PARM)
8316 temp = expand_unop (mode,
8317 ! unsignedp && flag_trapv
8318 && (GET_MODE_CLASS(mode) == MODE_INT)
8319 ? negv_optab : neg_optab, op0, target, 0);
8325 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8326 if (modifier == EXPAND_STACK_PARM)
8329 /* Handle complex values specially. */
8330 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
8331 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
8332 return expand_complex_abs (mode, op0, target, unsignedp);
8334 /* Unsigned abs is simply the operand. Testing here means we don't
8335 risk generating incorrect code below. */
8336 if (TREE_UNSIGNED (type))
8339 return expand_abs (mode, op0, target, unsignedp,
8340 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8344 target = original_target;
8346 || modifier == EXPAND_STACK_PARM
8347 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
8348 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
8349 || GET_MODE (target) != mode
8350 || (GET_CODE (target) == REG
8351 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8352 target = gen_reg_rtx (mode);
8353 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8354 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
/* First try to do it with a special MIN or MAX instruction.
   If that does not win, use a conditional jump to select the proper
   value.  */
8359 this_optab = (TREE_UNSIGNED (type)
8360 ? (code == MIN_EXPR ? umin_optab : umax_optab)
8361 : (code == MIN_EXPR ? smin_optab : smax_optab));
8363 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
/* At this point, a MEM target is no longer useful; we will get
   better code without it.  */
8371 if (GET_CODE (target) == MEM)
8372 target = gen_reg_rtx (mode);
8375 emit_move_insn (target, op0);
8377 op0 = gen_label_rtx ();
8379 /* If this mode is an integer too wide to compare properly,
8380 compare word by word. Rely on cse to optimize constant cases. */
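/* Illustrative sketch (not part of the original source): "word by
   word" means what the following does for a two-word unsigned value
   on a 32-bit target:

   int gt (unsigned long long x, unsigned long long y)
   {
     unsigned xh = x >> 32, yh = y >> 32;
     if (xh != yh)
       return xh > yh;
     return (unsigned) x > (unsigned) y;
   }

   i.e. the high words decide unless they are equal, in which case the
   low words are compared unsigned.  */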
8381 if (GET_MODE_CLASS (mode) == MODE_INT
8382 && ! can_compare_p (GE, mode, ccp_jump))
8384 if (code == MAX_EXPR)
8385 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8386 target, op1, NULL_RTX, op0);
8388 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8389 op1, target, NULL_RTX, op0);
8393 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
8394 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
8395 unsignedp, mode, NULL_RTX, NULL_RTX,
8398 emit_move_insn (target, op1);
8403 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8404 if (modifier == EXPAND_STACK_PARM)
8406 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8412 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8413 if (modifier == EXPAND_STACK_PARM)
8415 temp = expand_unop (mode, ffs_optab, op0, target, 1);
8421 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8422 temp = expand_unop (mode, clz_optab, op0, target, 1);
8428 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8429 temp = expand_unop (mode, ctz_optab, op0, target, 1);
8435 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8436 temp = expand_unop (mode, popcount_optab, op0, target, 1);
8442 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8443 temp = expand_unop (mode, parity_optab, op0, target, 1);
8448 /* ??? Can optimize bitwise operations with one arg constant.
8449 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8450 and (a bitwise1 b) bitwise2 b (etc)
but that is probably not worthwhile.  */
8453 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8454 boolean values when we want in all cases to compute both of them. In
8455 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8456 as actual zero-or-1 values and then bitwise anding. In cases where
8457 there cannot be any side effects, better code would be made by
8458 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8459 how to recognize those cases. */
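/* Illustrative contrast (not part of the original source): for

   int f (int p, int q) { return (p > 0) & (q > 0); }

   both comparisons are reduced to 0-or-1 values and combined with a
   plain AND, with no branch; the TRUTH_ANDIF_EXPR form
   (p > 0) && (q > 0) would instead jump around the second test.  */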
8461 case TRUTH_AND_EXPR:
8463 this_optab = and_optab;
8468 this_optab = ior_optab;
8471 case TRUTH_XOR_EXPR:
8473 this_optab = xor_optab;
8480 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8482 if (modifier == EXPAND_STACK_PARM)
8484 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8485 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8488 /* Could determine the answer when only additive constants differ. Also,
8489 the addition of one can be handled by changing the condition. */
8496 case UNORDERED_EXPR:
8503 temp = do_store_flag (exp,
8504 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8505 tmode != VOIDmode ? tmode : mode, 0);
8509 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8510 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8512 && GET_CODE (original_target) == REG
8513 && (GET_MODE (original_target)
8514 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8516 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8519 /* If temp is constant, we can just compute the result. */
8520 if (GET_CODE (temp) == CONST_INT)
8522 if (INTVAL (temp) != 0)
8523 emit_move_insn (target, const1_rtx);
8525 emit_move_insn (target, const0_rtx);
8530 if (temp != original_target)
8532 enum machine_mode mode1 = GET_MODE (temp);
8533 if (mode1 == VOIDmode)
8534 mode1 = tmode != VOIDmode ? tmode : mode;
8536 temp = copy_to_mode_reg (mode1, temp);
8539 op1 = gen_label_rtx ();
8540 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8541 GET_MODE (temp), unsignedp, op1);
8542 emit_move_insn (temp, const1_rtx);
8547 /* If no set-flag instruction, must generate a conditional
8548 store into a temporary variable. Drop through
8549 and handle this like && and ||. */
8551 case TRUTH_ANDIF_EXPR:
8552 case TRUTH_ORIF_EXPR:
8555 || modifier == EXPAND_STACK_PARM
8556 || ! safe_from_p (target, exp, 1)
8557 /* Make sure we don't have a hard reg (such as function's return
8558 value) live across basic blocks, if not optimizing. */
8559 || (!optimize && GET_CODE (target) == REG
8560 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8561 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8564 emit_clr_insn (target);
8566 op1 = gen_label_rtx ();
8567 jumpifnot (exp, op1);
8570 emit_0_to_1_insn (target);
8573 return ignore ? const0_rtx : target;
8575 case TRUTH_NOT_EXPR:
8576 if (modifier == EXPAND_STACK_PARM)
8578 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8579 /* The parser is careful to generate TRUTH_NOT_EXPR
8580 only with operands that are always zero or one. */
8581 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8582 target, 1, OPTAB_LIB_WIDEN);
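/* Worked example (illustrative only): since the operand is known to
   be 0 or 1, !x is computed as x ^ 1; 0 ^ 1 == 1 and 1 ^ 1 == 0, so
   no comparison or branch is needed.  */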
8588 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8590 return expand_expr (TREE_OPERAND (exp, 1),
8591 (ignore ? const0_rtx : target),
8592 VOIDmode, modifier);
8595 /* If we would have a "singleton" (see below) were it not for a
8596 conversion in each arm, bring that conversion back out. */
8597 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8598 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8599 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8600 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8602 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8603 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8605 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8606 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8607 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8608 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8609 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8610 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8611 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8612 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8613 return expand_expr (build1 (NOP_EXPR, type,
8614 build (COND_EXPR, TREE_TYPE (iftrue),
8615 TREE_OPERAND (exp, 0),
8617 target, tmode, modifier);
8621 /* Note that COND_EXPRs whose type is a structure or union
8622 are required to be constructed to contain assignments of
8623 a temporary variable, so that we can evaluate them here
8624 for side effect only. If type is void, we must do likewise. */
8626 /* If an arm of the branch requires a cleanup,
8627 only that cleanup is performed. */
8630 tree binary_op = 0, unary_op = 0;
8632 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8633 convert it to our mode, if necessary. */
8634 if (integer_onep (TREE_OPERAND (exp, 1))
8635 && integer_zerop (TREE_OPERAND (exp, 2))
8636 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8640 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8645 if (modifier == EXPAND_STACK_PARM)
8647 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8648 if (GET_MODE (op0) == mode)
8652 target = gen_reg_rtx (mode);
8653 convert_move (target, op0, unsignedp);
8657 /* Check for X ? A + B : A. If we have this, we can copy A to the
8658 output and conditionally add B. Similarly for unary operations.
8659 Don't do this if X has side-effects because those side effects
8660 might affect A or B and the "?" operation is a sequence point in
8661 ANSI. (operand_equal_p tests for side effects.) */
8663 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8664 && operand_equal_p (TREE_OPERAND (exp, 2),
8665 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8666 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8667 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8668 && operand_equal_p (TREE_OPERAND (exp, 1),
8669 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8670 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8671 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8672 && operand_equal_p (TREE_OPERAND (exp, 2),
8673 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8674 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8675 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8676 && operand_equal_p (TREE_OPERAND (exp, 1),
8677 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8678 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8680 /* If we are not to produce a result, we have no target. Otherwise,
8681 if a target was specified use it; it will not be used as an
intermediate target unless it is safe.  If no target, use a
temporary.  */
8687 else if (modifier == EXPAND_STACK_PARM)
8688 temp = assign_temp (type, 0, 0, 1);
8689 else if (original_target
8690 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8691 || (singleton && GET_CODE (original_target) == REG
8692 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8693 && original_target == var_rtx (singleton)))
8694 && GET_MODE (original_target) == mode
8695 #ifdef HAVE_conditional_move
8696 && (! can_conditionally_move_p (mode)
8697 || GET_CODE (original_target) == REG
8698 || TREE_ADDRESSABLE (type))
8700 && (GET_CODE (original_target) != MEM
8701 || TREE_ADDRESSABLE (type)))
8702 temp = original_target;
8703 else if (TREE_ADDRESSABLE (type))
8706 temp = assign_temp (type, 0, 0, 1);
8708 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8709 do the test of X as a store-flag operation, do this as
8710 A + ((X != 0) << log C). Similarly for other simple binary
8711 operators. Only do for C == 1 if BRANCH_COST is low. */
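/* Worked example (illustrative only): for x ? a + 8 : a the test is
   done as a store-flag operation and shifted into place:

   t = (x != 0);
   result = a + (t << 3);

   since 8 == 1 << 3.  When x is false nothing is added; when true,
   8 is added, and no conditional branch is emitted.  */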
8712 if (temp && singleton && binary_op
8713 && (TREE_CODE (binary_op) == PLUS_EXPR
8714 || TREE_CODE (binary_op) == MINUS_EXPR
8715 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8716 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8717 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8718 : integer_onep (TREE_OPERAND (binary_op, 1)))
8719 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8723 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8724 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8725 ? addv_optab : add_optab)
8726 : TREE_CODE (binary_op) == MINUS_EXPR
8727 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8728 ? subv_optab : sub_optab)
8729 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8732 /* If we had X ? A : A + 1, do this as A + (X == 0). */
8733 if (singleton == TREE_OPERAND (exp, 1))
8734 cond = invert_truthvalue (TREE_OPERAND (exp, 0));
8736 cond = TREE_OPERAND (exp, 0);
8738 result = do_store_flag (cond, (safe_from_p (temp, singleton, 1)
8740 mode, BRANCH_COST <= 1);
8742 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8743 result = expand_shift (LSHIFT_EXPR, mode, result,
8744 build_int_2 (tree_log2
8748 (safe_from_p (temp, singleton, 1)
8749 ? temp : NULL_RTX), 0);
8753 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8754 return expand_binop (mode, boptab, op1, result, temp,
8755 unsignedp, OPTAB_LIB_WIDEN);
8759 do_pending_stack_adjust ();
8761 op0 = gen_label_rtx ();
8763 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8767 /* If the target conflicts with the other operand of the
8768 binary op, we can't use it. Also, we can't use the target
8769 if it is a hard register, because evaluating the condition
8770 might clobber it. */
8772 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8773 || (GET_CODE (temp) == REG
8774 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8775 temp = gen_reg_rtx (mode);
8776 store_expr (singleton, temp,
8777 modifier == EXPAND_STACK_PARM ? 2 : 0);
8780 expand_expr (singleton,
8781 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8782 if (singleton == TREE_OPERAND (exp, 1))
8783 jumpif (TREE_OPERAND (exp, 0), op0);
8785 jumpifnot (TREE_OPERAND (exp, 0), op0);
8787 start_cleanup_deferral ();
8788 if (binary_op && temp == 0)
8789 /* Just touch the other operand. */
8790 expand_expr (TREE_OPERAND (binary_op, 1),
8791 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8793 store_expr (build (TREE_CODE (binary_op), type,
8794 make_tree (type, temp),
8795 TREE_OPERAND (binary_op, 1)),
8796 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8798 store_expr (build1 (TREE_CODE (unary_op), type,
8799 make_tree (type, temp)),
8800 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8803 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8804 comparison operator. If we have one of these cases, set the
8805 output to A, branch on A (cse will merge these two references),
8806 then set the output to FOO. */
8808 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8809 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8810 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8811 TREE_OPERAND (exp, 1), 0)
8812 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8813 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8814 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8816 if (GET_CODE (temp) == REG
8817 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8818 temp = gen_reg_rtx (mode);
8819 store_expr (TREE_OPERAND (exp, 1), temp,
8820 modifier == EXPAND_STACK_PARM ? 2 : 0);
8821 jumpif (TREE_OPERAND (exp, 0), op0);
8823 start_cleanup_deferral ();
8824 store_expr (TREE_OPERAND (exp, 2), temp,
8825 modifier == EXPAND_STACK_PARM ? 2 : 0);
8829 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8830 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8831 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8832 TREE_OPERAND (exp, 2), 0)
8833 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8834 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8835 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8837 if (GET_CODE (temp) == REG
8838 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8839 temp = gen_reg_rtx (mode);
8840 store_expr (TREE_OPERAND (exp, 2), temp,
8841 modifier == EXPAND_STACK_PARM ? 2 : 0);
8842 jumpifnot (TREE_OPERAND (exp, 0), op0);
8844 start_cleanup_deferral ();
8845 store_expr (TREE_OPERAND (exp, 1), temp,
8846 modifier == EXPAND_STACK_PARM ? 2 : 0);
8851 op1 = gen_label_rtx ();
8852 jumpifnot (TREE_OPERAND (exp, 0), op0);
8854 start_cleanup_deferral ();
/* One branch of the cond can be void, if it never returns.  For
   example, A ? throw : E.  */
8859 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8860 store_expr (TREE_OPERAND (exp, 1), temp,
8861 modifier == EXPAND_STACK_PARM ? 2 : 0);
8863 expand_expr (TREE_OPERAND (exp, 1),
8864 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8865 end_cleanup_deferral ();
8867 emit_jump_insn (gen_jump (op1));
8870 start_cleanup_deferral ();
8872 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8873 store_expr (TREE_OPERAND (exp, 2), temp,
8874 modifier == EXPAND_STACK_PARM ? 2 : 0);
8876 expand_expr (TREE_OPERAND (exp, 2),
8877 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8880 end_cleanup_deferral ();
8891 /* Something needs to be initialized, but we didn't know
8892 where that thing was when building the tree. For example,
8893 it could be the return value of a function, or a parameter
to a function which is laid out in the stack, or a temporary
8895 variable which must be passed by reference.
8897 We guarantee that the expression will either be constructed
8898 or copied into our original target. */
8900 tree slot = TREE_OPERAND (exp, 0);
8901 tree cleanups = NULL_TREE;
8904 if (TREE_CODE (slot) != VAR_DECL)
8908 target = original_target;
8910 /* Set this here so that if we get a target that refers to a
8911 register variable that's already been used, put_reg_into_stack
8912 knows that it should fix up those uses. */
8913 TREE_USED (slot) = 1;
8917 if (DECL_RTL_SET_P (slot))
8919 target = DECL_RTL (slot);
/* We have already expanded the slot, so don't do anything else.  */
8922 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8927 target = assign_temp (type, 2, 0, 1);
8928 /* All temp slots at this level must not conflict. */
8929 preserve_temp_slots (target);
8930 SET_DECL_RTL (slot, target);
8931 if (TREE_ADDRESSABLE (slot))
8932 put_var_into_stack (slot);
8934 /* Since SLOT is not known to the called function
8935 to belong to its stack frame, we must build an explicit
8936 cleanup. This case occurs when we must build up a reference
8937 to pass the reference as an argument. In this case,
it is very likely that such a reference need not be built here.  */
8941 if (TREE_OPERAND (exp, 2) == 0)
8942 TREE_OPERAND (exp, 2)
8943 = (*lang_hooks.maybe_build_cleanup) (slot);
8944 cleanups = TREE_OPERAND (exp, 2);
/* This case does occur when expanding a parameter which
8950 needs to be constructed on the stack. The target
8951 is the actual stack address that we want to initialize.
8952 The function we call will perform the cleanup in this case. */
8954 /* If we have already assigned it space, use that space,
not the target that we were passed in, as our target
8956 parameter is only a hint. */
8957 if (DECL_RTL_SET_P (slot))
8959 target = DECL_RTL (slot);
/* We have already expanded the slot, so don't do anything else.  */
8962 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8967 SET_DECL_RTL (slot, target);
8968 /* If we must have an addressable slot, then make sure that
8969 the RTL that we just stored in slot is OK. */
8970 if (TREE_ADDRESSABLE (slot))
8971 put_var_into_stack (slot);
8975 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8976 /* Mark it as expanded. */
8977 TREE_OPERAND (exp, 1) = NULL_TREE;
8979 store_expr (exp1, target, modifier == EXPAND_STACK_PARM ? 2 : 0);
8981 expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));
8988 tree lhs = TREE_OPERAND (exp, 0);
8989 tree rhs = TREE_OPERAND (exp, 1);
8991 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8997 /* If lhs is complex, expand calls in rhs before computing it.
8998 That's so we don't compute a pointer and save it over a
8999 call. If lhs is simple, compute it first so we can give it
9000 as a target if the rhs is just a call. This avoids an
9001 extra temp and copy and that prevents a partial-subsumption
9002 which makes bad code. Actually we could treat
9003 component_ref's of vars like vars. */
9005 tree lhs = TREE_OPERAND (exp, 0);
9006 tree rhs = TREE_OPERAND (exp, 1);
9010 /* Check for |= or &= of a bitfield of size one into another bitfield
9011 of size 1. In this case, (unless we need the result of the
9012 assignment) we can do this more efficiently with a
9013 test followed by an assignment, if necessary.
9015 ??? At this point, we can't get a BIT_FIELD_REF here. But if
things change so we do, this code should be enhanced to support
it.  */
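/* Illustrative sketch (not part of the original source): for

   struct s { unsigned a : 1, b : 1; } x;
   x.a |= x.b;

   this emits the equivalent of if (x.b) x.a = 1; i.e. a test of the
   source bit jumping around a store of a constant, instead of a
   read-modify-write of the destination bitfield.  */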
9019 && TREE_CODE (lhs) == COMPONENT_REF
9020 && (TREE_CODE (rhs) == BIT_IOR_EXPR
9021 || TREE_CODE (rhs) == BIT_AND_EXPR)
9022 && TREE_OPERAND (rhs, 0) == lhs
9023 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
9024 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
9025 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
9027 rtx label = gen_label_rtx ();
9029 do_jump (TREE_OPERAND (rhs, 1),
9030 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
9031 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
9032 expand_assignment (lhs, convert (TREE_TYPE (rhs),
9033 (TREE_CODE (rhs) == BIT_IOR_EXPR
9035 : integer_zero_node)),
9037 do_pending_stack_adjust ();
9042 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
9048 if (!TREE_OPERAND (exp, 0))
9049 expand_null_return ();
9051 expand_return (TREE_OPERAND (exp, 0));
9054 case PREINCREMENT_EXPR:
9055 case PREDECREMENT_EXPR:
9056 return expand_increment (exp, 0, ignore);
9058 case POSTINCREMENT_EXPR:
9059 case POSTDECREMENT_EXPR:
9060 /* Faster to treat as pre-increment if result is not used. */
9061 return expand_increment (exp, ! ignore, ignore);
9064 if (modifier == EXPAND_STACK_PARM)
9066 /* Are we taking the address of a nested function? */
9067 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
9068 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
9069 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
9070 && ! TREE_STATIC (exp))
9072 op0 = trampoline_address (TREE_OPERAND (exp, 0));
9073 op0 = force_operand (op0, target);
/* If we are taking the address of something erroneous, just
   use zero.  */
9077 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
9079 /* If we are taking the address of a constant and are at the
9080 top level, we have to use output_constant_def since we can't
9081 call force_const_mem at top level. */
9083 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
9084 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
9086 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
9089 /* We make sure to pass const0_rtx down if we came in with
9090 ignore set, to avoid doing the cleanups twice for something. */
9091 op0 = expand_expr (TREE_OPERAND (exp, 0),
9092 ignore ? const0_rtx : NULL_RTX, VOIDmode,
9093 (modifier == EXPAND_INITIALIZER
9094 ? modifier : EXPAND_CONST_ADDRESS));
9096 /* If we are going to ignore the result, OP0 will have been set
9097 to const0_rtx, so just return it. Don't get confused and
9098 think we are taking the address of the constant. */
9102 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
clever and return a REG when given a MEM.  */
9104 op0 = protect_from_queue (op0, 1);
9106 /* We would like the object in memory. If it is a constant, we can
9107 have it be statically allocated into memory. For a non-constant,
9108 we need to allocate some memory and store the value into it. */
9110 if (CONSTANT_P (op0))
9111 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
9113 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
9114 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
9115 || GET_CODE (op0) == PARALLEL)
9117 /* If the operand is a SAVE_EXPR, we can deal with this by
9118 forcing the SAVE_EXPR into memory. */
9119 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
9121 put_var_into_stack (TREE_OPERAND (exp, 0));
9122 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
9126 /* If this object is in a register, it can't be BLKmode. */
9127 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9128 rtx memloc = assign_temp (inner_type, 1, 1, 1);
9130 if (GET_CODE (op0) == PARALLEL)
9131 /* Handle calls that pass values in multiple
non-contiguous locations.  The Irix 6 ABI has examples of this.  */
9134 emit_group_store (memloc, op0,
9135 int_size_in_bytes (inner_type));
9137 emit_move_insn (memloc, op0);
9143 if (GET_CODE (op0) != MEM)
9146 mark_temp_addr_taken (op0);
9147 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
9149 op0 = XEXP (op0, 0);
9150 #ifdef POINTERS_EXTEND_UNSIGNED
9151 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
9152 && mode == ptr_mode)
9153 op0 = convert_memory_address (ptr_mode, op0);
/* If OP0 is not aligned at least as much as the type requires, we
9159 need to make a temporary, copy OP0 to it, and take the address of
9160 the temporary. We want to use the alignment of the type, not of
9161 the operand. Note that this is incorrect for FUNCTION_TYPE, but
9162 the test for BLKmode means that can't happen. The test for
BLKmode is because we never make mis-aligned MEMs with
non-BLKmode.
9166 We don't need to do this at all if the machine doesn't have
9167 strict alignment. */
9168 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
9169 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
9171 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
9173 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9175 = assign_stack_temp_for_type
9176 (TYPE_MODE (inner_type),
9177 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
9178 : int_size_in_bytes (inner_type),
9179 1, build_qualified_type (inner_type,
9180 (TYPE_QUALS (inner_type)
9181 | TYPE_QUAL_CONST)));
9183 if (TYPE_ALIGN_OK (inner_type))
9186 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
9187 (modifier == EXPAND_STACK_PARM
9188 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
9192 op0 = force_operand (XEXP (op0, 0), target);
9196 && GET_CODE (op0) != REG
9197 && modifier != EXPAND_CONST_ADDRESS
9198 && modifier != EXPAND_INITIALIZER
9199 && modifier != EXPAND_SUM)
9200 op0 = force_reg (Pmode, op0);
9202 if (GET_CODE (op0) == REG
9203 && ! REG_USERVAR_P (op0))
9204 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
9206 #ifdef POINTERS_EXTEND_UNSIGNED
9207 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
9208 && mode == ptr_mode)
9209 op0 = convert_memory_address (ptr_mode, op0);
9214 case ENTRY_VALUE_EXPR:
9217 /* COMPLEX type for Extended Pascal & Fortran */
9220 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9223 /* Get the rtx code of the operands. */
9224 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9225 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
9228 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
9232 /* Move the real (op0) and imaginary (op1) parts to their location. */
9233 emit_move_insn (gen_realpart (mode, target), op0);
9234 emit_move_insn (gen_imagpart (mode, target), op1);
9236 insns = get_insns ();
9239 /* Complex construction should appear as a single unit. */
9240 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
9241 each with a separate pseudo as destination.
9242 It's not correct for flow to treat them as a unit. */
9243 if (GET_CODE (target) != CONCAT)
9244 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
9252 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9253 return gen_realpart (mode, op0);
9256 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9257 return gen_imagpart (mode, op0);
9261 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9265 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9268 target = gen_reg_rtx (mode);
9272 /* Store the realpart and the negated imagpart to target. */
9273 emit_move_insn (gen_realpart (partmode, target),
9274 gen_realpart (partmode, op0));
9276 imag_t = gen_imagpart (partmode, target);
9277 temp = expand_unop (partmode,
9278 ! unsignedp && flag_trapv
9279 && (GET_MODE_CLASS(partmode) == MODE_INT)
9280 ? negv_optab : neg_optab,
9281 gen_imagpart (partmode, op0), imag_t, 0);
9283 emit_move_insn (imag_t, temp);
9285 insns = get_insns ();
/* Conjugate should appear as a single unit.
9289 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
9290 each with a separate pseudo as destination.
9291 It's not correct for flow to treat them as a unit. */
9292 if (GET_CODE (target) != CONCAT)
9293 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
9300 case TRY_CATCH_EXPR:
9302 tree handler = TREE_OPERAND (exp, 1);
9304 expand_eh_region_start ();
9306 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9308 expand_eh_region_end_cleanup (handler);
9313 case TRY_FINALLY_EXPR:
9315 tree try_block = TREE_OPERAND (exp, 0);
9316 tree finally_block = TREE_OPERAND (exp, 1);
9318 if (!optimize || unsafe_for_reeval (finally_block) > 1)
9320 /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
9321 is not sufficient, so we cannot expand the block twice.
9322 So we play games with GOTO_SUBROUTINE_EXPR to let us
9323 expand the thing only once. */
9324 /* When not optimizing, we go ahead with this form since
9325 (1) user breakpoints operate more predictably without
9326 code duplication, and
9327 (2) we're not running any of the global optimizers
9328 that would explode in time/space with the highly
9329 connected CFG created by the indirect branching. */
9331 rtx finally_label = gen_label_rtx ();
9332 rtx done_label = gen_label_rtx ();
9333 rtx return_link = gen_reg_rtx (Pmode);
9334 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
9335 (tree) finally_label, (tree) return_link);
9336 TREE_SIDE_EFFECTS (cleanup) = 1;
9338 /* Start a new binding layer that will keep track of all cleanup
9339 actions to be performed. */
9340 expand_start_bindings (2);
9341 target_temp_slot_level = temp_slot_level;
9343 expand_decl_cleanup (NULL_TREE, cleanup);
9344 op0 = expand_expr (try_block, target, tmode, modifier);
9346 preserve_temp_slots (op0);
9347 expand_end_bindings (NULL_TREE, 0, 0);
9348 emit_jump (done_label);
9349 emit_label (finally_label);
9350 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
9351 emit_indirect_jump (return_link);
9352 emit_label (done_label);
9356 expand_start_bindings (2);
9357 target_temp_slot_level = temp_slot_level;
9359 expand_decl_cleanup (NULL_TREE, finally_block);
9360 op0 = expand_expr (try_block, target, tmode, modifier);
9362 preserve_temp_slots (op0);
9363 expand_end_bindings (NULL_TREE, 0, 0);
9369 case GOTO_SUBROUTINE_EXPR:
9371 rtx subr = (rtx) TREE_OPERAND (exp, 0);
9372 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
9373 rtx return_address = gen_label_rtx ();
9374 emit_move_insn (return_link,
9375 gen_rtx_LABEL_REF (Pmode, return_address));
9377 emit_label (return_address);
9382 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
9385 return get_exception_pointer (cfun);
/* Function descriptors are not valid except as initialization
   constants, and should not be expanded.  */
9393 return (*lang_hooks.expand_expr) (exp, original_target, tmode, modifier);
9396 /* Here to do an ordinary binary operator, generating an instruction
9397 from the optab already placed in `this_optab'. */
9399 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
9401 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
9402 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9404 if (modifier == EXPAND_STACK_PARM)
9406 temp = expand_binop (mode, this_optab, op0, op1, target,
9407 unsignedp, OPTAB_LIB_WIDEN);
9413 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9414 when applied to the address of EXP produces an address known to be
9415 aligned more than BIGGEST_ALIGNMENT. */
9418 is_aligning_offset (offset, exp)
9422 /* Strip off any conversions and WITH_RECORD_EXPR nodes. */
9423 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9424 || TREE_CODE (offset) == NOP_EXPR
9425 || TREE_CODE (offset) == CONVERT_EXPR
9426 || TREE_CODE (offset) == WITH_RECORD_EXPR)
9427 offset = TREE_OPERAND (offset, 0);
/* We must now have a BIT_AND_EXPR with a constant that is one less
   than a power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
9431 if (TREE_CODE (offset) != BIT_AND_EXPR
9432 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9433 || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT) <= 0
|| exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9437 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9438 It must be NEGATE_EXPR. Then strip any more conversions. */
9439 offset = TREE_OPERAND (offset, 0);
9440 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9441 || TREE_CODE (offset) == NOP_EXPR
9442 || TREE_CODE (offset) == CONVERT_EXPR)
9443 offset = TREE_OPERAND (offset, 0);
9445 if (TREE_CODE (offset) != NEGATE_EXPR)
9448 offset = TREE_OPERAND (offset, 0);
9449 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9450 || TREE_CODE (offset) == NOP_EXPR
9451 || TREE_CODE (offset) == CONVERT_EXPR)
9452 offset = TREE_OPERAND (offset, 0);
9454 /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
9455 whose type is the same as EXP. */
9456 return (TREE_CODE (offset) == ADDR_EXPR
9457 && (TREE_OPERAND (offset, 0) == exp
9458 || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
9459 && (TREE_TYPE (TREE_OPERAND (offset, 0))
9460 == TREE_TYPE (exp)))));
9463 /* Return the tree node if an ARG corresponds to a string constant or zero
9464 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9465 in bytes within the string that ARG is accessing. The type of the
9466 offset will be `sizetype'. */
9469 string_constant (arg, ptr_offset)
9475 if (TREE_CODE (arg) == ADDR_EXPR
9476 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9478 *ptr_offset = size_zero_node;
9479 return TREE_OPERAND (arg, 0);
9481 else if (TREE_CODE (arg) == PLUS_EXPR)
9483 tree arg0 = TREE_OPERAND (arg, 0);
9484 tree arg1 = TREE_OPERAND (arg, 1);
9489 if (TREE_CODE (arg0) == ADDR_EXPR
9490 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9492 *ptr_offset = convert (sizetype, arg1);
9493 return TREE_OPERAND (arg0, 0);
9495 else if (TREE_CODE (arg1) == ADDR_EXPR
9496 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9498 *ptr_offset = convert (sizetype, arg0);
9499 return TREE_OPERAND (arg1, 0);
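/* Illustrative sketch (not part of the original source): for the C
   expression "hello" + 3 the argument is a PLUS_EXPR whose first
   operand is the address of the STRING_CST "hello"; this function
   returns that STRING_CST and sets *ptr_offset to 3, so callers such
   as the strlen folder can inspect the tail of the literal.  */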
9506 /* Expand code for a post- or pre- increment or decrement
9507 and return the RTX for the result.
9508 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9511 expand_increment (exp, post, ignore)
9517 tree incremented = TREE_OPERAND (exp, 0);
9518 optab this_optab = add_optab;
9520 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9521 int op0_is_copy = 0;
9522 int single_insn = 0;
9523 /* 1 means we can't store into OP0 directly,
9524 because it is a subreg narrower than a word,
9525 and we don't dare clobber the rest of the word. */
9528 /* Stabilize any component ref that might need to be
9529 evaluated more than once below. */
9531 || TREE_CODE (incremented) == BIT_FIELD_REF
9532 || (TREE_CODE (incremented) == COMPONENT_REF
9533 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9534 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9535 incremented = stabilize_reference (incremented);
9536 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9537 ones into save exprs so that they don't accidentally get evaluated
9538 more than once by the code below. */
9539 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9540 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9541 incremented = save_expr (incremented);
9543 /* Compute the operands as RTX.
9544 Note whether OP0 is the actual lvalue or a copy of it:
9545 I believe it is a copy iff it is a register or subreg
9546 and insns were generated in computing it. */
9548 temp = get_last_insn ();
9549 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9551 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9552 in place but instead must do sign- or zero-extension during assignment,
so we copy it into a new register and let the code below use it as
a copy.

Note that we can safely modify this SUBREG since it is known not to be
9557 shared (it was made by the expand_expr call above). */
9559 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9562 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9566 else if (GET_CODE (op0) == SUBREG
9567 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9569 /* We cannot increment this SUBREG in place. If we are
9570 post-incrementing, get a copy of the old value. Otherwise,
9571 just mark that we cannot increment in place. */
9573 op0 = copy_to_reg (op0);
9578 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9579 && temp != get_last_insn ());
9580 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9582 /* Decide whether incrementing or decrementing. */
9583 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9584 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9585 this_optab = sub_optab;
9587 /* Convert decrement by a constant into a negative increment. */
9588 if (this_optab == sub_optab
9589 && GET_CODE (op1) == CONST_INT)
9591 op1 = GEN_INT (-INTVAL (op1));
9592 this_optab = add_optab;
9595 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9596 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9598 /* For a preincrement, see if we can do this with a single instruction. */
9601 icode = (int) this_optab->handlers[(int) mode].insn_code;
9602 if (icode != (int) CODE_FOR_nothing
9603 /* Make sure that OP0 is valid for operands 0 and 1
9604 of the insn we want to queue. */
9605 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9606 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9607 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9611 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9612 then we cannot just increment OP0. We must therefore contrive to
9613 increment the original value. Then, for postincrement, we can return
9614 OP0 since it is a copy of the old value. For preincrement, expand here
9615 unless we can do it with a single insn.
9617 Likewise if storing directly into OP0 would clobber high bits
9618 we need to preserve (bad_subreg). */
9619 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9621 /* This is the easiest way to increment the value wherever it is.
9622 Problems with multiple evaluation of INCREMENTED are prevented
9623 because either (1) it is a component_ref or preincrement,
9624 in which case it was stabilized above, or (2) it is an array_ref
9625 with constant index in an array in a register, which is
9626 safe to reevaluate. */
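/* Illustrative sketch (not part of the original source): for a
   post-increment p->f++ whose OP0 came back as a copy, the code below
   expands the assignment p->f = p->f + 1 (INCREMENTED was stabilized
   above, so p->f is not evaluated twice) and returns the previously
   computed OP0, the old value, as the result of the expression.  */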
9627 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9628 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9629 ? MINUS_EXPR : PLUS_EXPR),
9632 TREE_OPERAND (exp, 1));
9634 while (TREE_CODE (incremented) == NOP_EXPR
9635 || TREE_CODE (incremented) == CONVERT_EXPR)
9637 newexp = convert (TREE_TYPE (incremented), newexp);
9638 incremented = TREE_OPERAND (incremented, 0);
9641 temp = expand_assignment (incremented, newexp, ! post && ! ignore , 0);
9642 return post ? op0 : temp;
9647 /* We have a true reference to the value in OP0.
9648 If there is an insn to add or subtract in this mode, queue it.
9649 Queueing the increment insn avoids the register shuffling
9650 that often results if we must increment now and first save
9651 the old value for subsequent use. */
9653 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9654 op0 = stabilize (op0);
9657 icode = (int) this_optab->handlers[(int) mode].insn_code;
9658 if (icode != (int) CODE_FOR_nothing
9659 /* Make sure that OP0 is valid for operands 0 and 1
9660 of the insn we want to queue. */
9661 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9662 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9664 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9665 op1 = force_reg (mode, op1);
9667 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9669 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9671 rtx addr = (general_operand (XEXP (op0, 0), mode)
9672 ? force_reg (Pmode, XEXP (op0, 0))
9673 : copy_to_reg (XEXP (op0, 0)));
9676 op0 = replace_equiv_address (op0, addr);
9677 temp = force_reg (GET_MODE (op0), op0);
9678 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9679 op1 = force_reg (mode, op1);
9681 /* The increment queue is LIFO, thus we have to `queue'
9682 the instructions in reverse order. */
9683 enqueue_insn (op0, gen_move_insn (op0, temp));
9684 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9689 /* Preincrement, or we can't increment with one simple insn. */
9691 /* Save a copy of the value before inc or dec, to return it later. */
9692 temp = value = copy_to_reg (op0);
9694 /* Arrange to return the incremented value. */
9695 /* Copy the rtx because expand_binop will protect from the queue,
9696 and the results of that would be invalid for us to return
9697 if our caller does emit_queue before using our result. */
9698 temp = copy_rtx (value = op0);
9700 /* Increment however we can. */
9701 op1 = expand_binop (mode, this_optab, value, op1, op0,
9702 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9704 /* Make sure the value is stored into OP0. */
9706 emit_move_insn (op0, op1);
9711 /* At the start of a function, record that we have no previously-pushed
9712 arguments waiting to be popped. */
9715 init_pending_stack_adjust ()
9717 pending_stack_adjust = 0;
/* When exiting from a function, if safe, clear out any pending stack adjust
9721 so the adjustment won't get done.
9723 Note, if the current function calls alloca, then it must have a
9724 frame pointer regardless of the value of flag_omit_frame_pointer. */
9727 clear_pending_stack_adjust ()
9729 #ifdef EXIT_IGNORE_STACK
9731 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9732 && EXIT_IGNORE_STACK
9733 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9734 && ! flag_inline_functions)
9736 stack_pointer_delta -= pending_stack_adjust,
9737 pending_stack_adjust = 0;
9742 /* Pop any previously-pushed arguments that have not been popped yet. */
9745 do_pending_stack_adjust ()
9747 if (inhibit_defer_pop == 0)
9749 if (pending_stack_adjust != 0)
9750 adjust_stack (GEN_INT (pending_stack_adjust));
9751 pending_stack_adjust = 0;
9755 /* Expand conditional expressions. */
9757 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
LABEL is an rtx of code CODE_LABEL, in this function and all the
functions here.  */
9762 jumpifnot (exp, label)
9766 do_jump (exp, label, NULL_RTX);
9769 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9776 do_jump (exp, NULL_RTX, label);
9779 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9780 the result is zero, or IF_TRUE_LABEL if the result is one.
9781 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9782 meaning fall through in that case.
9784 do_jump always does any pending stack adjust except when it does not
9785 actually perform a jump. An example where there is no jump
9786 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9788 This function is responsible for optimizing cases such as
9789 &&, || and comparison operators in EXP. */
9792 do_jump (exp, if_false_label, if_true_label)
9794 rtx if_false_label, if_true_label;
9796 enum tree_code code = TREE_CODE (exp);
9797 /* Some cases need to create a label to jump to
9798 in order to properly fall through.
9799 These cases set DROP_THROUGH_LABEL nonzero. */
9800 rtx drop_through_label = 0;
9804 enum machine_mode mode;
9806 #ifdef MAX_INTEGER_COMPUTATION_MODE
9807 check_max_integer_computation_mode (exp);
9818 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9824 /* This is not true with #pragma weak */
9826 /* The address of something can never be zero. */
9828 emit_jump (if_true_label);
9833 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9834 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9835 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
9836 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
/* If we are narrowing the operand, we have to do the compare in the
   narrower mode.  */
9841 if ((TYPE_PRECISION (TREE_TYPE (exp))
9842 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9844 case NON_LVALUE_EXPR:
9845 case REFERENCE_EXPR:
9850 /* These cannot change zero->nonzero or vice versa. */
9851 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9854 case WITH_RECORD_EXPR:
9855 /* Put the object on the placeholder list, recurse through our first
9856 operand, and pop the list. */
9857 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9859 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9860 placeholder_list = TREE_CHAIN (placeholder_list);
/* This is never fewer insns than evaluating the PLUS_EXPR followed
   by a test and can be longer if the test is eliminated.  */
9867 /* Reduce to minus. */
9868 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9869 TREE_OPERAND (exp, 0),
9870 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9871 TREE_OPERAND (exp, 1))));
9872 /* Process as MINUS. */
9876 /* Nonzero iff operands of minus differ. */
9877 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9878 TREE_OPERAND (exp, 0),
9879 TREE_OPERAND (exp, 1)),
9880 NE, NE, if_false_label, if_true_label);
9884 /* If we are AND'ing with a small constant, do this comparison in the
9885 smallest type that fits. If the machine doesn't have comparisons
9886 that small, it will be converted back to the wider comparison.
9887 This helps if we are testing the sign bit of a narrower object.
9888 combine can't do this for us because it can't know whether a
9889 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
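/* Illustrative sketch (not part of the original source): for

   int x;
   ... if (x & 0x80) ...

   tree_floor_log2 of the constant is 7, so an 8-bit mode suffices and
   the test can be done as a single byte comparison on targets with
   QImode compares, instead of a full-word AND followed by a compare.  */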
9891 if (! SLOW_BYTE_ACCESS
9892 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9893 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9894 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9895 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9896 && (type = (*lang_hooks.types.type_for_mode) (mode, 1)) != 0
9897 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9898 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9899 != CODE_FOR_nothing))
9901 do_jump (convert (type, exp), if_false_label, if_true_label);
9906 case TRUTH_NOT_EXPR:
9907 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9910 case TRUTH_ANDIF_EXPR:
9911 if (if_false_label == 0)
9912 if_false_label = drop_through_label = gen_label_rtx ();
9913 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9914 start_cleanup_deferral ();
9915 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9916 end_cleanup_deferral ();
9919 case TRUTH_ORIF_EXPR:
9920 if (if_true_label == 0)
9921 if_true_label = drop_through_label = gen_label_rtx ();
9922 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9923 start_cleanup_deferral ();
9924 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9925 end_cleanup_deferral ();
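/* Illustrative sketch (not part of the original source): for
   if (a && b) ... the TRUTH_ANDIF case above emits, in effect,

   if (a == 0) goto false_label;
   if (b == 0) goto false_label;

   and the TRUTH_ORIF case dually jumps to the true label as soon as
   either operand is known nonzero.  */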
9930 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9931 preserve_temp_slots (NULL_RTX);
9935 do_pending_stack_adjust ();
9936 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9942 case ARRAY_RANGE_REF:
9944 HOST_WIDE_INT bitsize, bitpos;
9946 enum machine_mode mode;
9951 /* Get description of this reference. We don't actually care
9952 about the underlying object here. */
9953 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9954 &unsignedp, &volatilep);
9956 type = (*lang_hooks.types.type_for_size) (bitsize, unsignedp);
9957 if (! SLOW_BYTE_ACCESS
9958 && type != 0 && bitsize >= 0
9959 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9960 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9961 != CODE_FOR_nothing))
9963 do_jump (convert (type, exp), if_false_label, if_true_label);
9970 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9971 if (integer_onep (TREE_OPERAND (exp, 1))
9972 && integer_zerop (TREE_OPERAND (exp, 2)))
9973 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9975 else if (integer_zerop (TREE_OPERAND (exp, 1))
9976 && integer_onep (TREE_OPERAND (exp, 2)))
9977 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9981 rtx label1 = gen_label_rtx ();
9982 drop_through_label = gen_label_rtx ();
9984 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9986 start_cleanup_deferral ();
9987 /* Now the THEN-expression. */
9988 do_jump (TREE_OPERAND (exp, 1),
9989 if_false_label ? if_false_label : drop_through_label,
9990 if_true_label ? if_true_label : drop_through_label);
9991 /* In case the do_jump just above never jumps. */
9992 do_pending_stack_adjust ();
9993 emit_label (label1);
9995 /* Now the ELSE-expression. */
9996 do_jump (TREE_OPERAND (exp, 2),
9997 if_false_label ? if_false_label : drop_through_label,
9998 if_true_label ? if_true_label : drop_through_label);
9999 end_cleanup_deferral ();
10005 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10007 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10008 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10010 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
10011 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
10014 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
10015 fold (build (EQ_EXPR, TREE_TYPE (exp),
10016 fold (build1 (REALPART_EXPR,
10017 TREE_TYPE (inner_type),
10019 fold (build1 (REALPART_EXPR,
10020 TREE_TYPE (inner_type),
10022 fold (build (EQ_EXPR, TREE_TYPE (exp),
10023 fold (build1 (IMAGPART_EXPR,
10024 TREE_TYPE (inner_type),
10026 fold (build1 (IMAGPART_EXPR,
10027 TREE_TYPE (inner_type),
10029 if_false_label, if_true_label);
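/* Illustrative sketch (not part of the original source): for complex
   operands, x == y was rebuilt above as the scalar test

   real(x) == real(y) && imag(x) == imag(y)

   with both operands wrapped in SAVE_EXPRs so that each is evaluated
   only once.  */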
10032 else if (integer_zerop (TREE_OPERAND (exp, 1)))
10033 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10035 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10036 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
10037 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
10039 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
10045 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10047 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10048 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10050 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
10051 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
10054 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
10055 fold (build (NE_EXPR, TREE_TYPE (exp),
10056 fold (build1 (REALPART_EXPR,
10057 TREE_TYPE (inner_type),
10059 fold (build1 (REALPART_EXPR,
10060 TREE_TYPE (inner_type),
10062 fold (build (NE_EXPR, TREE_TYPE (exp),
10063 fold (build1 (IMAGPART_EXPR,
10064 TREE_TYPE (inner_type),
10066 fold (build1 (IMAGPART_EXPR,
10067 TREE_TYPE (inner_type),
10069 if_false_label, if_true_label);
10072 else if (integer_zerop (TREE_OPERAND (exp, 1)))
10073 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10075 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10076 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
10077 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
10079 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
10084 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10085 if (GET_MODE_CLASS (mode) == MODE_INT
10086 && ! can_compare_p (LT, mode, ccp_jump))
10087 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
10089 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
10093 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10094 if (GET_MODE_CLASS (mode) == MODE_INT
10095 && ! can_compare_p (LE, mode, ccp_jump))
10096 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
10098 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
10102 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10103 if (GET_MODE_CLASS (mode) == MODE_INT
10104 && ! can_compare_p (GT, mode, ccp_jump))
10105 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
10107 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
10111 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10112 if (GET_MODE_CLASS (mode) == MODE_INT
10113 && ! can_compare_p (GE, mode, ccp_jump))
10114 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
10116 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
      {
	enum rtx_code cmp, rcmp;
	int do_rev;

	if (code == UNORDERED_EXPR)
	  cmp = UNORDERED, rcmp = ORDERED;
	else
	  cmp = ORDERED, rcmp = UNORDERED;
	mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));

	do_rev = 0;
	if (! can_compare_p (cmp, mode, ccp_jump)
	    && (can_compare_p (rcmp, mode, ccp_jump)
		/* If the target doesn't provide either UNORDERED or ORDERED
		   comparisons, canonicalize on UNORDERED for the library.  */
		|| rcmp == UNORDERED))
	  do_rev = 1;

	if (! do_rev)
	  do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
	else
	  do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
      }
      break;
      {
	enum rtx_code rcode1;
	enum tree_code tcode2;

	case UNLT_EXPR:
	  rcode1 = UNLT;
	  tcode2 = LT_EXPR;
	  goto unordered_bcc;
	case UNLE_EXPR:
	  rcode1 = UNLE;
	  tcode2 = LE_EXPR;
	  goto unordered_bcc;
	case UNGT_EXPR:
	  rcode1 = UNGT;
	  tcode2 = GT_EXPR;
	  goto unordered_bcc;
	case UNGE_EXPR:
	  rcode1 = UNGE;
	  tcode2 = GE_EXPR;
	  goto unordered_bcc;
	case UNEQ_EXPR:
	  rcode1 = UNEQ;
	  tcode2 = EQ_EXPR;
	  goto unordered_bcc;

	unordered_bcc:
	  mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
	  if (can_compare_p (rcode1, mode, ccp_jump))
	    do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
				 if_true_label);
	  else
	    {
	      tree op0 = save_expr (TREE_OPERAND (exp, 0));
	      tree op1 = save_expr (TREE_OPERAND (exp, 1));
	      tree cmp0, cmp1;

	      /* If the target doesn't support combined unordered
		 compares, decompose into UNORDERED + comparison.  */
	      cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
	      cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
	      exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
	      do_jump (exp, if_false_label, if_true_label);
	    }
      }
      break;
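    /* Illustrative note (not part of the compiled code): on a target
       with no direct "unle" branch, "a UNLE b" is rewritten by the
       else-branch above as "UNORDERED (a, b) || a <= b", i.e. the true
       label is reached either because a NaN makes the operands
       unordered or because the ordered comparison holds.  */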
      /* Special case:
	  __builtin_expect (<test>, 0)	and
	  __builtin_expect (<test>, 1)

	 We need to do this here, so that <test> is not converted to a SCC
	 operation on machines that use condition code registers and COMPARE
	 like the PowerPC, and then the jump is done based on whether the SCC
	 operation produced a 1 or 0.  */
    case CALL_EXPR:
      /* Check for a built-in function.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
	{
	  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
	  tree arglist = TREE_OPERAND (exp, 1);

	  if (TREE_CODE (fndecl) == FUNCTION_DECL
	      && DECL_BUILT_IN (fndecl)
	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
	      && arglist != NULL_TREE
	      && TREE_CHAIN (arglist) != NULL_TREE)
	    {
	      rtx seq = expand_builtin_expect_jump (exp, if_false_label,
						    if_true_label);

	      if (seq != NULL_RTX)
		{
		  emit_insn (seq);
		  return;
		}
	    }
	}
      /* Fall through and generate the normal code.  */
    default:
    normal:
      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
#if 0
      /* This is not needed any more and causes poor code since it causes
	 comparisons and tests from non-SI objects to have different code
	 sequences.  */
      /* Copy to register to avoid generating bad insns by cse
	 from (set (mem ...) (arithop))  (set (cc0) (mem ...)).  */
      if (!cse_not_expected && GET_CODE (temp) == MEM)
	temp = copy_to_reg (temp);
#endif
      do_pending_stack_adjust ();
      /* Do any postincrements in the expression that was tested.  */
      emit_queue ();

      if (GET_CODE (temp) == CONST_INT
	  || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
	  || GET_CODE (temp) == LABEL_REF)
	{
	  rtx target = temp == const0_rtx ? if_false_label : if_true_label;
	  if (target)
	    emit_jump (target);
	}
      else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
	       && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
	/* Note swapping the labels gives us not-equal.  */
	do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
      else if (GET_MODE (temp) != VOIDmode)
	do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
				 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
				 GET_MODE (temp), NULL_RTX,
				 if_false_label, if_true_label);
      else
	abort ();
    }

  if (drop_through_label)
    {
      /* If do_jump produces code that might be jumped around,
	 do any stack adjusts from that code, before the place
	 where control merges in.  */
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}
/* Given a comparison expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.
   The code of EXP is ignored; we always test GT if SWAP is 0,
   and LT if SWAP is 1.  */

static void
do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
     tree exp;
     int swap;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));

  do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label,
				if_true_label);
}
/* Compare OP0 with OP1, word at a time, in mode MODE.
   UNSIGNEDP says to do unsigned comparison.
   Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise.  */

void
do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label,
			      if_true_label)
     enum machine_mode mode;
     int unsignedp;
     rtx op0, op1;
     rtx if_false_label, if_true_label;
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int i;

  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
	{
	  op0_word = operand_subword_force (op0, i, mode);
	  op1_word = operand_subword_force (op1, i, mode);
	}
      else
	{
	  op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
	  op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
	}

      /* All but high-order word must be compared as unsigned.  */
      do_compare_rtx_and_jump (op0_word, op1_word, GT,
			       (unsignedp || i > 0), word_mode, NULL_RTX,
			       NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal.  */
      do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
			       NULL_RTX, NULL_RTX, if_false_label);
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
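/* Illustrative note (not part of the compiled code): for a two-word
   (e.g. DImode on a 32-bit target) test "a > b", the loop above emits,
   in effect,

	if (hi (a) > hi (b)) goto if_true_label;
	if (hi (a) != hi (b)) goto if_false_label;
	if (lo (a) > lo (b)) goto if_true_label;   (unsigned compare)
	if (lo (a) != lo (b)) goto if_false_label;
	goto if_false_label;                       (all words equal)

   where hi/lo stand for the operand_subword_force results; only the
   high-order word is compared with the caller's signedness.  */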
/* Given an EQ_EXPR expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.  */

static void
do_jump_by_parts_equality (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  int i;
  rtx drop_through_label = 0;

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
			     operand_subword_force (op1, i, mode),
			     EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
			     word_mode, NULL_RTX, if_false_label, NULL_RTX);

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
/* Jump according to whether OP0 is 0.
   We assume that OP0 has an integer mode that is too wide
   for the available compare insns.  */

void
do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
     rtx op0;
     rtx if_false_label, if_true_label;
{
  int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
  rtx part;
  int i;
  rtx drop_through_label = 0;

  /* The fastest way of doing this comparison on almost any machine is to
     "or" all the words and compare the result.  If all have to be loaded
     from memory and this is a very wide item, it's possible this may
     be slower, but that's highly unlikely.  */

  part = gen_reg_rtx (word_mode);
  emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
  for (i = 1; i < nwords && part != 0; i++)
    part = expand_binop (word_mode, ior_optab, part,
			 operand_subword_force (op0, i, GET_MODE (op0)),
			 part, 1, OPTAB_WIDEN);

  if (part != 0)
    {
      do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
			       NULL_RTX, if_false_label, if_true_label);
      return;
    }

  /* If we couldn't do the "or" simply, do this with a series of compares.  */
  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
			     const0_rtx, EQ, 1, word_mode, NULL_RTX,
			     if_false_label, NULL_RTX);

  if (if_true_label)
    emit_jump (if_true_label);

  if (drop_through_label)
    emit_label (drop_through_label);
}
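/* Illustrative note (not part of the compiled code): for a four-word
   OP0 the fast path above computes, in effect,

	part = w0 | w1 | w2 | w3;
	if (part == 0) goto if_true_label; else goto if_false_label;

   i.e. one compare of the OR of all the words against zero instead of
   four separate word compares.  */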
/* Generate code for a comparison of OP0 and OP1 with rtx code CODE
   (including code to compute the values to be compared), and set (CC0)
   according to the result.  The decision as to signed or unsigned
   comparison must be made by the caller.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.  */

rtx
compare_from_rtx (op0, op1, code, unsignedp, mode, size)
     rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
{
  enum rtx_code ucode;
  rtx tem;

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  ucode = unsignedp ? unsigned_condition (code) : code;
  if ((tem = simplify_relational_operation (ucode, mode, op0, op1)) != 0)
    return tem;

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
	  && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
	op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  emit_cmp_insn (op0, op1, code, size, mode, unsignedp);

#if HAVE_cc0
  return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
#else
  return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
#endif
}
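/* Illustrative note (not part of the compiled code): the
   canonicalization above means a request to compare, say,
   "(const_int 4) < reg" is emitted as "reg > (const_int 4)";
   swap_condition adjusts CODE when the operands are exchanged, so the
   relation actually tested is unchanged.  */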
/* Like do_compare_and_jump but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.  */

void
do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size,
			 if_false_label, if_true_label)
     rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     rtx if_false_label, if_true_label;
{
  enum rtx_code ucode;
  rtx tem;
  int dummy_true_label = 0;

  /* Reverse the comparison if that is safe and we want to jump if it is
     false.  */
  if (! if_true_label && ! FLOAT_MODE_P (mode))
    {
      if_true_label = if_false_label;
      if_false_label = 0;
      code = reverse_condition (code);
    }

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  ucode = unsignedp ? unsigned_condition (code) : code;
  if ((tem = simplify_relational_operation (ucode, mode, op0, op1)) != 0)
    {
      if (tem == const_true_rtx)
	{
	  if (if_true_label)
	    emit_jump (if_true_label);
	}
      else
	{
	  if (if_false_label)
	    emit_jump (if_false_label);
	}
      return;
    }

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
	  && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
	op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  if (! if_true_label)
    {
      dummy_true_label = 1;
      if_true_label = gen_label_rtx ();
    }

  emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
			   if_true_label);

  if (if_false_label)
    emit_jump (if_false_label);
  if (dummy_true_label)
    emit_label (if_true_label);
}
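/* Illustrative note (not part of the compiled code): a caller with only
   a false label, e.g. do_jump for "if (x > y) ...", arrives here with
   if_true_label == 0.  For non-float modes the code above then tests
   the reversed condition "x <= y" and jumps to the old false label, so
   a single conditional branch suffices.  The reversal is skipped for
   floating-point modes, where the possibility of NaN operands makes
   reverse_condition invalid.  */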
/* Generate code for a comparison expression EXP (including code to compute
   the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
   IF_TRUE_LABEL.  One of the labels can be NULL_RTX, in which case the
   generated code will drop through.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */

static void
do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
		     if_true_label)
     tree exp;
     enum rtx_code signed_code, unsigned_code;
     rtx if_false_label, if_true_label;
{
  rtx op0, op1;
  tree type;
  enum machine_mode mode;
  int unsignedp;
  enum rtx_code code;

  /* Don't crash if the comparison was erroneous.  */
  op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
    return;

  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
    return;

  type = TREE_TYPE (TREE_OPERAND (exp, 0));
  mode = TYPE_MODE (type);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
      && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
	  || (GET_MODE_BITSIZE (mode)
	      > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
								      1)))))))
    {
      /* op0 might have been replaced by promoted constant, in which
	 case the type of second argument should be used.  */
      type = TREE_TYPE (TREE_OPERAND (exp, 1));
      mode = TYPE_MODE (type);
    }
  unsignedp = TREE_UNSIGNED (type);
  code = unsignedp ? unsigned_code : signed_code;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	  == FUNCTION_TYPE))
    {
      rtx new_op0 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;
    }

  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
	  == FUNCTION_TYPE))
    {
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif

  /* Do any postincrements in the expression that was tested.  */
  emit_queue ();

  do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
			   ((mode == BLKmode)
			    ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
			   if_false_label, if_true_label);
}
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is nonzero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */
static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	       == FUNCTION_TYPE))
	  || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
		  == FUNCTION_TYPE))))
    return 0;
#endif
  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
	code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = LT;
      else
	code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = GE;
      else
	code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
	code = unsignedp ? GEU : GE;
      break;

    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;

    default:
      abort ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }
  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      int ops_unsignedp;

      /* If INNER is a right shift of a constant and it plus BITNUM does
	 not overflow, adjust BITNUM and INNER.  */

      if (TREE_CODE (inner) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
	  && bitnum < TYPE_PRECISION (type)
	  && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
				   bitnum - TYPE_PRECISION (type)))
	{
	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
	  inner = TREE_OPERAND (inner, 0);
	}

      /* If we are going to be able to omit the AND below, we must do our
	 operations as unsigned.  If we must use the AND, we have a choice.
	 Normally unsigned is faster, but for some machines signed is.  */
      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
#ifdef LOAD_EXTEND_OP
		       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
#else
		       : 1
#endif
		       );

      if (! get_subtarget (subtarget)
	  || GET_MODE (subtarget) != operand_mode
	  || ! safe_from_p (subtarget, inner, 1))
	subtarget = 0;

      op0 = expand_expr (inner, subtarget, VOIDmode, 0);

      if (bitnum != 0)
	op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
			    size_int (bitnum), subtarget, ops_unsignedp);

      if (GET_MODE (op0) != mode)
	op0 = convert_to_mode (mode, op0, ops_unsignedp);

      if ((code == EQ && ! invert) || (code == NE && invert))
	op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
			    ops_unsignedp, OPTAB_LIB_WIDEN);

      /* Put the AND last so it can combine with more things.  */
      if (bitnum != TYPE_PRECISION (type) - 1)
	op0 = expand_and (mode, op0, const1_rtx, subtarget);

      return op0;
    }
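  /* Illustrative note (not part of the compiled code): for
     "(x & 8) != 0" the block above produces, in effect, "(x >> 3) & 1";
     for "(x & 8) == 0" an XOR with 1 is inserted before the final AND,
     giving "((x >> 3) ^ 1) & 1".  No scc instruction is needed.  */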
  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (code, operand_mode, ccp_store_flag))
    return 0;

  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
    {
      /* We can only do this if it is one of the special cases that
	 can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
	  || (! only_cheap && code == GE && integer_zerop (arg1)))
	;
      else if (BRANCH_COST >= 0
	       && ! only_cheap && (code == NE || code == EQ)
	       && TREE_CODE (type) != REAL_TYPE
	       && ((abs_optab->handlers[(int) operand_mode].insn_code
		    != CODE_FOR_nothing)
		   || (ffs_optab->handlers[(int) operand_mode].insn_code
		       != CODE_FOR_nothing)))
	;
      else
	return 0;
    }

  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1, 1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if the emit_store_flag does anything it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
			    queued_subexp_p (op0) ? copy_rtx (op0) : op0,
			    queued_subexp_p (op1) ? copy_rtx (op1) : op1,
			    operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
	result = expand_binop (mode, xor_optab, result, const1_rtx,
			       result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
			     operand_mode, NULL_RTX);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
	     || (result != const0_rtx && invert))
	    ? const0_rtx : const1_rtx);

  /* The code of RESULT may not match CODE if compare_from_rtx
     decided to swap its operands and reverse the original code.

     We know that compare_from_rtx returns either a CONST_INT or
     a new comparison code, so it is safe to just extract the
     code from RESULT.  */
  code = GET_CODE (result);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif

/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */
#ifndef CASE_VALUES_THRESHOLD
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#endif /* CASE_VALUES_THRESHOLD */

unsigned int
case_values_threshold ()
{
  return CASE_VALUES_THRESHOLD;
}
/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */

int
try_casesi (index_type, index_expr, minval, range,
	    table_label, default_label)
     tree index_type, index_expr, minval, range;
     rtx table_label ATTRIBUTE_UNUSED;
     rtx default_label;
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build (MINUS_EXPR, index_type,
			  index_expr, minval);
      minval = integer_zero_node;
      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
			       omode, 1, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
	{
	  index_expr = convert ((*lang_hooks.types.type_for_size)
				(index_bits, 0), index_expr);
	  index_type = TREE_TYPE (index_expr);
	}

      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
    }
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);

  op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
		       op1, TREE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
		       op2, TREE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
			      table_label, default_label));
  return 1;
}
/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif
/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */
static void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  rtx temp, vector;

  if (INTVAL (range) > cfun->max_jumptable_ents)
    cfun->max_jumptable_ents = INTVAL (range);

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
			   default_label);
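  /* Illustrative note (not part of the compiled code): for a switch
     whose case values run from 5 to 10, the caller passes
     INDEX = x - 5 and RANGE = 5.  An original x of 3 gives INDEX = -2,
     which viewed as an unsigned value is huge, so the single GTU test
     above also catches values below the minimum.  */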
  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip thru, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
			gen_rtx_MULT (Pmode, index,
				      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
			gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
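/* Illustrative note (not part of the compiled code): in the non-PIC
   case the dispatch address computed in do_tablejump above is simply

	table_label + index * GET_MODE_SIZE (CASE_VECTOR_MODE)

   so the table entry selected by the rebased INDEX is loaded into TEMP
   and becomes the target of the indirect jump.  */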
int
try_tablejump (index_type, index_expr, minval, range,
	       table_label, default_label)
     tree index_type, index_expr, minval, range;
     rtx table_label, default_label;
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold (build (MINUS_EXPR, index_type,
			    convert (index_type, index_expr),
			    convert (index_type, minval)));
  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
		convert_modes (TYPE_MODE (index_type),
			       TYPE_MODE (TREE_TYPE (range)),
			       expand_expr (range, NULL_RTX,
					    VOIDmode, 0),
			       TREE_UNSIGNED (TREE_TYPE (range))),
		table_label, default_label);
  return 1;
}
/* Nonzero if the mode is a valid vector mode for this architecture.
   This returns nonzero even if there is no hardware support for the
   vector mode, but we can emulate with narrower modes.  */

int
vector_mode_valid_p (mode)
     enum machine_mode mode;
{
  enum mode_class class = GET_MODE_CLASS (mode);
  enum machine_mode innermode;

  /* Doh!  What's going on?  */
  if (class != MODE_VECTOR_INT
      && class != MODE_VECTOR_FLOAT)
    return 0;

  /* Hardware support.  Woo hoo!  */
  if (VECTOR_MODE_SUPPORTED_P (mode))
    return 1;

  innermode = GET_MODE_INNER (mode);

  /* We should probably return 1 if requesting V4DI and we have no DI,
     but we have V2DI, but this is probably very unlikely.  */

  /* If we have support for the inner mode, we can safely emulate it.
     We may not have V2DI, but we can emulate with a pair of DIs.  */
  return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
}
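/* Illustrative note (not part of the compiled code): on a target with
   no V2DI hardware support but a working DImode move pattern,
   vector_mode_valid_p (V2DImode) still returns nonzero, since a V2DI
   value can be moved as two DImode halves.  */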
#include "gt-expr.h"