/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "typeclass.h"
#include "langhooks.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif

/* Convert defined/undefined to boolean.  */
#ifdef TARGET_MEM_FUNCTIONS
#undef TARGET_MEM_FUNCTIONS
#define TARGET_MEM_FUNCTIONS 1
#else
#define TARGET_MEM_FUNCTIONS 0
#endif
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */

/* Chain of pending expressions for PLACEHOLDER_EXPR to replace.  */
tree placeholder_list = 0;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */
struct store_by_pieces
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
static rtx enqueue_insn PARAMS ((rtx, rtx));
static unsigned HOST_WIDE_INT move_by_pieces_ninsns
                                PARAMS ((unsigned HOST_WIDE_INT,
                                         unsigned int));
static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
                                      struct move_by_pieces *));
static bool block_move_libcall_safe_for_call_parm PARAMS ((void));
static bool emit_block_move_via_movstr PARAMS ((rtx, rtx, rtx, unsigned));
static rtx emit_block_move_via_libcall PARAMS ((rtx, rtx, rtx));
static tree emit_block_move_libcall_fn PARAMS ((int));
static void emit_block_move_via_loop PARAMS ((rtx, rtx, rtx, unsigned));
static rtx clear_by_pieces_1 PARAMS ((PTR, HOST_WIDE_INT,
                                      enum machine_mode));
static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
                                     unsigned int));
static void store_by_pieces_1 PARAMS ((struct store_by_pieces *,
                                       unsigned int));
static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...),
                                       enum machine_mode,
                                       struct store_by_pieces *));
static bool clear_storage_via_clrstr PARAMS ((rtx, rtx, unsigned));
static rtx clear_storage_via_libcall PARAMS ((rtx, rtx));
static tree clear_storage_libcall_fn PARAMS ((int));
static rtx compress_float_constant PARAMS ((rtx, rtx));
static rtx get_subtarget PARAMS ((rtx));
static int is_zeros_p PARAMS ((tree));
static int mostly_zeros_p PARAMS ((tree));
static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
                                             HOST_WIDE_INT, enum machine_mode,
                                             tree, tree, int, int));
static void store_constructor PARAMS ((tree, rtx, int, HOST_WIDE_INT));
static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
                                HOST_WIDE_INT, enum machine_mode,
                                tree, enum machine_mode, int, tree,
                                int));
static rtx var_rtx PARAMS ((tree));
static unsigned HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
static unsigned HOST_WIDE_INT highest_pow2_factor_for_type PARAMS ((tree,
                                                                    tree));
static int is_aligning_offset PARAMS ((tree, tree));
static rtx expand_increment PARAMS ((tree, int, int));
static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
static void do_tablejump PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));
static rtx const_vector_from_tree PARAMS ((tree));
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];

/* If a memory-to-memory move would take MOVE_RATIO or more simple
   move-instruction sequences, we will do a movstr or libcall instead.  */

#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)
#endif

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
#endif
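
/* Illustrative sketch, not part of GCC: how the ratio gates the
   decision.  Assuming the generic MOVE_RATIO of 15, a 16-byte copy of
   word-aligned operands on a 32-bit target takes four SImode moves, so
   MOVE_BY_PIECES_P is true and the copy is expanded inline instead of
   through a movstr pattern or a libcall.  SIZE is in bytes, ALIGN in
   bits.  */
#if 0
  if (MOVE_BY_PIECES_P (16, 32))	/* 4 insns < 15: copy inline.  */
    move_by_pieces (to, from, 16, 32);
#endif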
/* If a clear memory operation would take CLEAR_RATIO or more simple
   move-instruction sequences, we will do a clrstr or libcall instead.  */

#if defined (HAVE_clrstrqi) || defined (HAVE_clrstrhi) || defined (HAVE_clrstrsi) || defined (HAVE_clrstrdi) || defined (HAVE_clrstrti)
#define CLEAR_RATIO 2
#else
/* If we are optimizing for space, cut down the default clear ratio.  */
#define CLEAR_RATIO (optimize_size ? 3 : 15)
#endif

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero, or
   to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
#endif

/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

  enum machine_mode mode;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
         directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
        for (regno = 0; regno < FIRST_PSEUDO_REGISTER
             && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);

            if (! HARD_REGNO_MODE_OK (regno, mode))

            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = mem1;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_DEST (pat) = mem;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;

            SET_DEST (pat) = mem1;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
           srcmode = GET_MODE_WIDER_MODE (srcmode))
          ic = can_extend_p (mode, srcmode, 0);
          if (ic == CODE_FOR_nothing)

          PUT_MODE (mem, srcmode);

          if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
            float_extend_from_mem[mode][srcmode] = true;
/* This is run at the start of compiling a function.  */

  cfun->expr = (struct expr_status *) ggc_alloc (sizeof (struct expr_status));

  pending_stack_adjust = 0;
  stack_pointer_delta = 0;
  inhibit_defer_pop = 0;
  apply_args_value = 0;

/* Small sanity check that the queue is empty at the end of a function.  */

finish_expr_for_function ()

/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)

  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
                                  body, pending_chain);
  return pending_chain;

/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */
protect_from_queue (x, modify)

  RTX_CODE code = GET_CODE (x);

#if 0 /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)

  /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
     use of autoincrement.  Make a copy of the contents of the memory
     location rather than a copy of the address, but not if the value is
     of mode BLKmode.  Don't modify X in place since it might be
     shared.  */
  if (code == MEM && GET_MODE (x) != BLKmode
      && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
      rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));

          rtx temp = gen_reg_rtx (GET_MODE (x));

          emit_insn_before (gen_move_insn (temp, new),

      /* Copy the address into a pseudo, so that the returned value
         remains correct across calls to emit_queue.  */
      return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));

  /* Otherwise, recursively protect the subexpressions of all
     the kinds of rtx's that can contain a QUEUED.  */

      rtx tem = protect_from_queue (XEXP (x, 0), 0);
      if (tem != XEXP (x, 0))

  else if (code == PLUS || code == MULT)
      rtx new0 = protect_from_queue (XEXP (x, 0), 0);
      rtx new1 = protect_from_queue (XEXP (x, 1), 0);
      if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))

  /* If the increment has not happened, use the variable itself.  Copy it
     into a new pseudo so that the value remains correct across calls to
     emit_queue.  */
  if (QUEUED_INSN (x) == 0)
    return copy_to_reg (QUEUED_VAR (x));
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
                    QUEUED_INSN (x));
  return QUEUED_COPY (x);
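
/* Illustrative sketch, not part of GCC itself: the canonical calling
   pattern.  Each operand is protected immediately before the insn that
   uses it is emitted, and the queue is flushed afterwards; holding a
   protected value across a flush of the queue would produce incorrect
   code, as the comment above protect_from_queue warns.  */
#if 0
  to = protect_from_queue (to, 1);	/* TO will be modified.  */
  from = protect_from_queue (from, 0);	/* FROM is only read.  */
  emit_move_insn (to, from);
  emit_queue ();			/* Perform pending increments.  */
#endif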
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

  enum rtx_code code = GET_CODE (x);

      return queued_subexp_p (XEXP (x, 0));

      return (queued_subexp_p (XEXP (x, 0))
              || queued_subexp_p (XEXP (x, 1)));

/* Perform all the pending incrementations.  */

  while ((p = pending_chain))
      rtx body = QUEUED_BODY (p);

      switch (GET_CODE (body))
          QUEUED_INSN (p) = body;

#ifdef ENABLE_CHECKING
          QUEUED_INSN (p) = emit_insn (body);

      pending_chain = QUEUED_NEXT (p);

/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */
convert_move (to, from, unsignedp)

  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
                              : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
          >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;
  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
      emit_move_insn (to, from);

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))

      if (VECTOR_MODE_P (to_mode))
        from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
        to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);

  if (to_real != from_real)

  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, 0))
          emit_unop_insn (code, to, from, UNKNOWN);

#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
          emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);

#ifdef HAVE_trunctqfqf2
      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
          emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);

#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
          emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);

#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
          emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);

#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
          emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);

#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
          emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
          emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);

#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
          emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);

#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
          emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);

#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
          emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);

#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
          emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
          emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);

#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
          emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);

#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
          emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);

#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
          emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
          emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);

#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
          emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);

#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
          emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);

#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
          emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);

#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
          emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
          libcall = extendsfdf2_libfunc;
          libcall = extendsfxf2_libfunc;
          libcall = extendsftf2_libfunc;
          libcall = truncdfsf2_libfunc;
          libcall = extenddfxf2_libfunc;
          libcall = extenddftf2_libfunc;
          libcall = truncxfsf2_libfunc;
          libcall = truncxfdf2_libfunc;
          libcall = trunctfsf2_libfunc;
          libcall = trunctfdf2_libfunc;

  if (libcall == (rtx) 0)
    /* This conversion is not implemented yet.  */

  value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
  insns = get_insns ();
  emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          /* If FROM is a SUBREG, put it into a register.  Do this
             so that we always generate the same set of insns for
             better cse'ing; if an intermediate assignment occurred,
             we won't be doing the operation directly on the SUBREG.  */
          if (optimize > 0 && GET_CODE (from) == SUBREG)
            from = force_reg (from_mode, from);
          emit_unop_insn (code, to, from, equiv_code);

      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
               && ((code = can_extend_p (to_mode, word_mode, unsignedp))
                   != CODE_FOR_nothing))
          if (GET_CODE (to) == REG)
            emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
          convert_move (gen_lowpart (word_mode, to), from, unsignedp);
          emit_unop_insn (code, to,
                          gen_lowpart (word_mode, to), equiv_code);

      /* No special multiword conversion insn; do it by hand.  */

      /* Since we will turn this into a no conflict block, we must ensure
         that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
        from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
        lowpart_mode = word_mode;
      else
        lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
        fill_value = const0_rtx;

              && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
              && STORE_FLAG_VALUE == -1)
              emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
              fill_value = gen_reg_rtx (word_mode);
              emit_insn (gen_slt (fill_value));

                = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
                                size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
              fill_value = convert_to_mode (word_mode, fill_value, 1);

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
          int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
          rtx subword = operand_subword (to, index, 1, to_mode);

          if (fill_value != subword)
            emit_move_insn (subword, fill_value);

      insns = get_insns ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
                              gen_rtx_fmt_e (equiv_code, to_mode,
                                             copy_rtx (from)));
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
      if (!((GET_CODE (from) == MEM
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || GET_CODE (from) == REG
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
  /* Handle pointer conversion.  */		/* SPEE 900220.  */
  if (to_mode == PQImode)
      if (from_mode != QImode)
        from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_truncqipqi2
      if (HAVE_truncqipqi2)
          emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
#endif /* HAVE_truncqipqi2 */

  if (from_mode == PQImode)
      if (to_mode != QImode)
          from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_extendpqiqi2
          if (HAVE_extendpqiqi2)
              emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
#endif /* HAVE_extendpqiqi2 */

  if (to_mode == PSImode)
      if (from_mode != SImode)
        from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
          emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
#endif /* HAVE_truncsipsi2 */

  if (from_mode == PSImode)
      if (to_mode != SImode)
          from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_extendpsisi2
      if (! unsignedp && HAVE_extendpsisi2)
          emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
#endif /* HAVE_extendpsisi2 */
#ifdef HAVE_zero_extendpsisi2
      if (unsignedp && HAVE_zero_extendpsisi2)
          emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
#endif /* HAVE_zero_extendpsisi2 */

  if (to_mode == PDImode)
      if (from_mode != DImode)
        from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
          emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
#endif /* HAVE_truncdipdi2 */

  if (from_mode == PDImode)
      if (to_mode != DImode)
          from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_extendpdidi2
      if (HAVE_extendpdidi2)
          emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
#endif /* HAVE_extendpdidi2 */
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                GET_MODE_BITSIZE (from_mode)))
      if (!((GET_CODE (from) == MEM
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || GET_CODE (from) == REG
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
          && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
        from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
            from = force_not_mem (from);

          emit_unop_insn (code, to, from, equiv_code);

          enum machine_mode intermediate;

          /* Search for a mode to convert via.  */
          for (intermediate = from_mode; intermediate != VOIDmode;
               intermediate = GET_MODE_WIDER_MODE (intermediate))
            if (((can_extend_p (to_mode, intermediate, unsignedp)
                  != CODE_FOR_nothing)
                 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
                     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                               GET_MODE_BITSIZE (intermediate))))
                && (can_extend_p (intermediate, from_mode, unsignedp)
                    != CODE_FOR_nothing))
                convert_move (to, convert_to_mode (intermediate, from,
                                                   unsignedp), unsignedp);

          /* No suitable intermediate mode.
             Generate what we need with shifts.  */
          shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
                                      - GET_MODE_BITSIZE (from_mode), 0);
          from = gen_lowpart (to_mode, force_reg (from_mode, from));
          tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
          tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
          emit_move_insn (to, tmp);
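
          /* Worked example (illustrative): extending the QImode value
             0x80 to SImode with no extend insn and no usable
             intermediate mode.  SHIFT_AMOUNT is 32 - 8 = 24; the
             lowpart 0x00000080 shifted left gives 0x80000000, and the
             arithmetic right shift by 24 then yields 0xffffff80, the
             correctly sign-extended result.  For an unsigned
             conversion a logical right shift is used instead.  */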
  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
          emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);

      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == DImode && to_mode == HImode)
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
          emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);

      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == DImode && to_mode == QImode)
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
          emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);

      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == SImode && to_mode == HImode)
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
          emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);

      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == SImode && to_mode == QImode)
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
          emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);

      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == HImode && to_mode == QImode)
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
          emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);

      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == TImode && to_mode == DImode)
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
          emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);

      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == TImode && to_mode == SImode)
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
          emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);

      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == TImode && to_mode == HImode)
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
          emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);

      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == TImode && to_mode == QImode)
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
          emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);

      convert_move (to, force_reg (from_mode, from), unsignedp);

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);

  /* Mode combination is not recognized.  */
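
/* A hedged usage sketch (illustrative, not part of GCC): widening a
   QImode pseudo to SImode.  UNSIGNEDP selects the extension kind, as
   described in the comment above convert_move; the register names
   here are hypothetical.  */
#if 0
  rtx byte_reg = gen_reg_rtx (QImode);
  rtx word_reg = gen_reg_rtx (SImode);
  convert_move (word_reg, byte_reg, 1);	/* 1: unsigned, so zero-extend.  */
  convert_move (word_reg, byte_reg, 0);	/* 0: signed, so sign-extend.  */
#endif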
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;

  return convert_modes (mode, VOIDmode, x, unsignedp);

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do
     is make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
          && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
          int width = GET_MODE_BITSIZE (oldmode);

          /* We need to zero extend VAL.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
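
  /* Worked example (illustrative): with a 32-bit HOST_WIDE_INT,
     converting the CONST_INT -1 to an unsigned 64-bit mode must
     produce the double constant { low = 0xffffffff, high = 0 }.
     Plain gen_lowpart would sign-extend and deliver all ones in the
     high-order word, which is why this case is handled specially.  */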
  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
          && GET_MODE_CLASS (oldmode) == MODE_INT
          && (GET_CODE (x) == CONST_DOUBLE
              || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
                  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
                       && direct_load[(int) mode])
                      || (GET_CODE (x) == REG
                          && (! HARD_REGISTER_P (x)
                              || HARD_REGNO_MODE_OK (REGNO (x), mode))
                          && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
                                                    GET_MODE_BITSIZE (GET_MODE (x)))))))))
      /* ??? If we don't know OLDMODE, we have to assume here that
         X does not need sign- or zero-extension.  This may not be
         the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
          HOST_WIDE_INT val = INTVAL (x);
          int width = GET_MODE_BITSIZE (oldmode);

          /* We must sign or zero-extend in this case.  Start by
             zero-extending, then sign extend if we need to.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
          if (! unsignedp
              && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
            val |= (HOST_WIDE_INT) (-1) << width;

          return gen_int_mode (val, mode);

      return gen_lowpart (mode, x);

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);

/* This macro is used to determine the largest unit size that
   move_by_pieces can use.  */

/* MOVE_MAX_PIECES is the number of bytes at a time which we can
   move efficiently, as opposed to MOVE_MAX which is the maximum
   number of bytes we can move with a single instruction.  */

#ifndef MOVE_MAX_PIECES
#define MOVE_MAX_PIECES   MOVE_MAX
#endif

/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))

/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).  The caller must pass FROM
   and TO through protect_from_queue before calling.

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum alignment we can assume.  */
move_by_pieces (to, from, len, align)
     unsigned HOST_WIDE_INT len;

  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data.from_addr = from_addr;
      to_addr = XEXP (to, 0);
        = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
           || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
        = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
#ifdef STACK_GROWS_DOWNWARD
  data.to_addr = to_addr;
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
          data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
          data.autinc_from = 1;
          data.explicit_inc_from = -1;

      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
          data.from_addr = copy_addr_to_reg (from_addr);
          data.autinc_from = 1;
          data.explicit_inc_from = 1;

      if (!data.autinc_from && CONSTANT_P (from_addr))
        data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
          data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
          data.explicit_inc_to = -1;

      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
          data.to_addr = copy_addr_to_reg (to_addr);
          data.explicit_inc_to = 1;

      if (!data.autinc_to && CONSTANT_P (to_addr))
        data.to_addr = copy_addr_to_reg (to_addr);

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);

  /* The code above should have handled everything.  */
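
/* A hedged usage sketch (illustrative, not part of GCC): copying a
   16-byte block known to be 4-byte aligned.  Both operands are BLKmode
   MEMs that have already been passed through protect_from_queue; LEN
   is in bytes and ALIGN in bits, matching the description above.  */
#if 0
  move_by_pieces (to, from, 16, 32);
#endif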
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (l, align)
     unsigned HOST_WIDE_INT l;

  unsigned HOST_WIDE_INT n_insns = 0;
  unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  while (max_size > 1)
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
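
  /* Worked example (illustrative): on a 32-bit target with
     word-aligned operands, move_by_pieces_ninsns (7, 32) counts one
     SImode move (4 bytes), one HImode move (2 bytes) and one QImode
     move (1 byte), so it returns 3.  */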
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PARAMS ((rtx, ...));
     enum machine_mode mode;
     struct move_by_pieces *data;

  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
        data->offset -= size;

      if (data->autinc_to)
        to1 = adjust_automodify_address (data->to, mode, data->to_addr,
      else
        to1 = adjust_address (data->to, mode, data->offset);

      if (data->autinc_from)
        from1 = adjust_automodify_address (data->from, mode, data->from_addr,
      else
        from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
        emit_insn (gen_add2_insn (data->from_addr,
                                  GEN_INT (-(HOST_WIDE_INT)size)));

        emit_insn ((*genfun) (to1, from1));
#ifdef PUSH_ROUNDING
        emit_single_push_insn (mode, from1, NULL);

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
        data->offset += size;

/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */
emit_block_move (x, y, size, method)
     enum block_op_methods method;

    case BLOCK_OP_NORMAL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
         to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  if (GET_MODE (x) != BLKmode)
  if (GET_MODE (y) != BLKmode)

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
  if (GET_CODE (y) != MEM)

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (GET_CODE (size) == CONST_INT)
      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, size);
      set_mem_size (y, size);

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align);
  else if (emit_block_move_via_movstr (x, y, size, align))
    ;
  else if (may_use_call)
    retval = emit_block_move_via_libcall (x, y, size);
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;
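
/* A hedged usage sketch (illustrative, not part of GCC): an ordinary
   32-byte structure copy.  X and Y are BLKmode MEMs and SIZE is an
   rtx; with a CONST_INT size the copy may be done inline, by a movstr
   pattern, or by the memcpy libcall, as chosen above.  */
#if 0
  retval = emit_block_move (x, y, GEN_INT (32), BLOCK_OP_NORMAL);
#endif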
/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

block_move_libcall_safe_for_call_parm ()

  /* Check to see whether memcpy takes all register arguments.  */
    takes_regs_uninit, takes_regs_no, takes_regs_yes
  } takes_regs = takes_regs_uninit;

    case takes_regs_uninit:
        CUMULATIVE_ARGS args_so_far;

        fn = emit_block_move_libcall_fn (false);
        INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0);

        arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
        for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
            enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
            rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
            if (!tmp || !REG_P (tmp))
              goto fail_takes_regs;
#ifdef FUNCTION_ARG_PARTIAL_NREGS
            if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
              goto fail_takes_regs;
#endif
            FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);

        takes_regs = takes_regs_yes;

    case takes_regs_yes:

      takes_regs = takes_regs_no;
/* A subroutine of emit_block_move.  Expand a movstr pattern;
   return true if successful.  */

emit_block_move_via_movstr (x, y, size, align)

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  enum machine_mode mode;

  /* Since this is a move insn, we don't care about volatility.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
      enum insn_code code = movstr_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
          /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
             here because if SIZE is less than the mode mask, as it is
             returned by the macro, it will definitely be less than the
             actual mode mask.  */
          && ((GET_CODE (size) == CONST_INT
               && ((unsigned HOST_WIDE_INT) INTVAL (size)
                   <= (GET_MODE_MASK (mode) >> 1)))
              || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
          && ((pred = insn_data[(int) code].operand[0].predicate) == 0
              || (*pred) (x, BLKmode))
          && ((pred = insn_data[(int) code].operand[1].predicate) == 0
              || (*pred) (y, BLKmode))
          && ((pred = insn_data[(int) code].operand[3].predicate) == 0
              || (*pred) (opalign, VOIDmode)))
          rtx last = get_last_insn ();

          op2 = convert_to_mode (mode, size, 1);
          pred = insn_data[(int) code].operand[2].predicate;
          if (pred != 0 && ! (*pred) (op2, mode))
            op2 = copy_to_mode_reg (mode, op2);

          /* ??? When called via emit_block_move_for_call, it'd be
             nice if there were some way to inform the backend, so
             that it doesn't fail the expansion because it thinks
             emitting the libcall would be more efficient.  */

          pat = GEN_FCN ((int) code) (x, y, op2, opalign);

          delete_insns_since (last);
/* A subroutine of emit_block_move.  Expand a call to memcpy or bcopy.
   Return the return value from memcpy, 0 otherwise.  */

emit_block_move_via_libcall (dst, src, size)

  tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;

  /* DST, SRC, or SIZE may have been passed through protect_from_queue.

     It is unsafe to save the value generated by protect_from_queue
     and reuse it later.  Consider what happens if emit_queue is
     called before the return value from protect_from_queue is used.

     Expansion of the CALL_EXPR below will call emit_queue before
     we are finished emitting RTL for argument setup.  So if we are
     not careful we could get the wrong value for an argument.

     To avoid this problem we go ahead and emit code to copy X, Y &
     SIZE into new pseudos.  We can then place those new pseudos
     into an RTL_EXPR and use them later, even after a call to
     emit_queue.

     Note this is not strictly needed for library calls since they
     do not call emit_queue before loading their arguments.  However,
     we may need to have library calls call emit_queue in the future
     since failing to do so could cause problems for targets which
     define SMALL_REGISTER_CLASSES and pass arguments in registers.  */

  dst = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  src = copy_to_mode_reg (Pmode, XEXP (src, 0));

  if (TARGET_MEM_FUNCTIONS)
    size_mode = TYPE_MODE (sizetype);
  else
    size_mode = TYPE_MODE (unsigned_type_node);
  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.

     For convenience, we generate the call to bcopy this way as well.  */

  dst_tree = make_tree (ptr_type_node, dst);
  src_tree = make_tree (ptr_type_node, src);
  if (TARGET_MEM_FUNCTIONS)
    size_tree = make_tree (sizetype, size);
  else
    size_tree = make_tree (unsigned_type_node, size);

  fn = emit_block_move_libcall_fn (true);
  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
  if (TARGET_MEM_FUNCTIONS)
    {
      arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
      arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
    }
  else
    {
      arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
      arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
    }

  /* Now we have to build up the CALL_EXPR itself.  */
  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
  call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
                     call_expr, arg_list, NULL_TREE);
  TREE_SIDE_EFFECTS (call_expr) = 1;

  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);

  /* If we are initializing a readonly value, show the above call
     clobbered it.  Otherwise, a load from it may erroneously be
     hoisted from a loop.  */
  if (RTX_UNCHANGING_P (dst))
    emit_insn (gen_rtx_CLOBBER (VOIDmode, dst));

  return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_move_fn;

init_block_move_fn (asmspec)
     const char *asmspec;

  if (TARGET_MEM_FUNCTIONS)
          fn = get_identifier ("memcpy");
          args = build_function_type_list (ptr_type_node, ptr_type_node,
                                           const_ptr_type_node, sizetype,
          fn = get_identifier ("bcopy");
          args = build_function_type_list (void_type_node, const_ptr_type_node,
                                           ptr_type_node, unsigned_type_node,

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;

      SET_DECL_RTL (block_move_fn, NULL_RTX);
      SET_DECL_ASSEMBLER_NAME (block_move_fn, get_identifier (asmspec));

emit_block_move_libcall_fn (for_call)

  static bool emitted_extern;

    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
      emitted_extern = true;
      make_decl_rtl (block_move_fn, NULL);
      assemble_external (block_move_fn);

  return block_move_fn;
/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

emit_block_move_via_loop (x, y, size, align)
     unsigned int align ATTRIBUTE_UNUSED;

  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_note (NULL, NOTE_INSN_LOOP_BEG);

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (Pmode, iter_mode, iter, true);
  x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
  y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
                             true, OPTAB_LIB_WIDEN);
    emit_move_insn (iter, tmp);

  emit_note (NULL, NOTE_INSN_LOOP_CONT);
  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,

  emit_note (NULL, NOTE_INSN_LOOP_END);
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

move_block_to_reg (regno, x, nregs, mode)
     enum machine_mode mode;

#ifdef HAVE_load_multiple

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,

        delete_insns_since (last);

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
                    operand_subword_force (x, i, mode));
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  SIZE indicates the number
   of bytes in the object X.  */

move_block_from_reg (regno, x, nregs, size)

#ifdef HAVE_store_multiple
  enum machine_mode mode;

  /* If SIZE is that of a mode no bigger than a word, just use that
     mode's store operation.  */
  if (size <= UNITS_PER_WORD
      && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
      emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno));

  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
     to the left before storing to memory.  Note that the previous test
     doesn't handle all cases (e.g. SIZE == 3).  */
  if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
      rtx tem = operand_subword (x, 0, 1, BLKmode);

      shift = expand_shift (LSHIFT_EXPR, word_mode,
                            gen_rtx_REG (word_mode, regno),
                            build_int_2 ((UNITS_PER_WORD - size)
                                         * BITS_PER_UNIT, 0), NULL_RTX, 0);
      emit_move_insn (tem, shift);
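
      /* Worked example (illustrative): with SIZE == 3 and 4-byte words
         on a big-endian target, the register is shifted left by
         (UNITS_PER_WORD - SIZE) * BITS_PER_UNIT = (4 - 3) * 8 = 8 bits,
         so the three meaningful bytes land in the most significant
         byte positions, matching their order in memory.  */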
  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
      last = get_last_insn ();
      pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),

        delete_insns_since (last);

  for (i = 0; i < nregs; i++)
      rtx tem = operand_subword (x, i, 1, BLKmode);

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));

/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

gen_group_rtx (orig)

  if (GET_CODE (orig) != PARALLEL)

  length = XVECLEN (orig, 0);
  tmps = (rtx *) alloca (sizeof (rtx) * length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  for (; i < length; i++)
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));

/* Emit code to move a block SRC to a block DST, where DST is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block SRC in bytes, or -1 if not known.  */
/* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
   the balance will be in what would be the low-order memory addresses, i.e.
   left justified for big endian, right justified for little endian.  This
   happens to be true for the targets currently using this support.  If this
   ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
   would be needed.  */

emit_group_load (dst, orig_src, ssize)
2283 if (GET_CODE (dst) != PARALLEL)
2286 /* Check for a NULL entry, used to indicate that the parameter goes
2287 both on the stack and in registers. */
2288 if (XEXP (XVECEXP (dst, 0, 0), 0))
2293 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
2295 /* Process the pieces. */
2296 for (i = start; i < XVECLEN (dst, 0); i++)
2298 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
2299 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
2300 unsigned int bytelen = GET_MODE_SIZE (mode);
2303 /* Handle trailing fragments that run over the size of the struct. */
2304 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2306 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2307 bytelen = ssize - bytepos;
2312 /* If we won't be loading directly from memory, protect the real source
2313 from strange tricks we might play; but make sure that the source can
2314 be loaded directly into the destination. */
2316 if (GET_CODE (orig_src) != MEM
2317 && (!CONSTANT_P (orig_src)
2318 || (GET_MODE (orig_src) != mode
2319 && GET_MODE (orig_src) != VOIDmode)))
2321 if (GET_MODE (orig_src) == VOIDmode)
2322 src = gen_reg_rtx (mode);
2324 src = gen_reg_rtx (GET_MODE (orig_src));
2326 emit_move_insn (src, orig_src);
2329 /* Optimize the access just a bit. */
2330 if (GET_CODE (src) == MEM
2331 && MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode)
2332 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2333 && bytelen == GET_MODE_SIZE (mode))
2335 tmps[i] = gen_reg_rtx (mode);
2336 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
2338 else if (GET_CODE (src) == CONCAT)
2340 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
2341 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
2343 if ((bytepos == 0 && bytelen == slen0)
2344 || (bytepos != 0 && bytepos + bytelen <= slen))
2346 /* The following assumes that the concatenated objects all
2347 have the same size. In this case, a simple calculation
2348 can be used to determine the object and the bit field
2349 to be extracted.  */
2350 tmps[i] = XEXP (src, bytepos / slen0);
2351 if (! CONSTANT_P (tmps[i])
2352 && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
2353 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
2354 (bytepos % slen0) * BITS_PER_UNIT,
2355 1, NULL_RTX, mode, mode, ssize);
2357 else if (bytepos == 0)
2359 rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
2360 emit_move_insn (mem, src);
2361 tmps[i] = adjust_address (mem, mode, 0);
2366 else if (CONSTANT_P (src)
2367 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2370 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2371 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2374 if (BYTES_BIG_ENDIAN && shift)
2375 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2376 tmps[i], 0, OPTAB_WIDEN);
2381 /* Copy the extracted pieces into the proper (probable) hard regs. */
2382 for (i = start; i < XVECLEN (dst, 0); i++)
2383 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
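/* Worked example of the trailing-fragment handling above, assuming a
   32-bit big-endian target: for SSIZE == 6 and an SImode piece at
   BYTEPOS == 4, BYTELEN is reduced from 4 to 2 and SHIFT becomes
   (4 - 2) * BITS_PER_UNIT == 16, so the two meaningful bytes are
   shifted left to sit where the low-order memory addresses would be,
   exactly as the ??? comment before emit_group_load assumes.  */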
2386 /* Emit code to move a block SRC to block DST, where SRC and DST are
2387 non-consecutive groups of registers, each represented by a PARALLEL. */
2390 emit_group_move (dst, src)
2395 if (GET_CODE (src) != PARALLEL
2396 || GET_CODE (dst) != PARALLEL
2397 || XVECLEN (src, 0) != XVECLEN (dst, 0))
2400 /* Skip first entry if NULL. */
2401 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
2402 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
2403 XEXP (XVECEXP (src, 0, i), 0));
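/* Usage sketch (HARD_GROUP below is a hypothetical PARALLEL of hard
   registers, as built by FUNCTION_ARG or FUNCTION_VALUE):  */
#if 0
  rtx tmp = gen_group_rtx (hard_group);
  emit_group_move (tmp, hard_group);	/* hard regs -> fresh pseudos */
  /* ... compute with the pseudos ... */
  emit_group_move (hard_group, tmp);	/* pseudos -> hard regs */
#endif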
2406 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2407 registers represented by a PARALLEL. SSIZE represents the total size of
2408 block DST, or -1 if not known. */
2411 emit_group_store (orig_dst, src, ssize)
2418 if (GET_CODE (src) != PARALLEL)
2421 /* Check for a NULL entry, used to indicate that the parameter goes
2422 both on the stack and in registers. */
2423 if (XEXP (XVECEXP (src, 0, 0), 0))
2428 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2430 /* Copy the (probable) hard regs into pseudos. */
2431 for (i = start; i < XVECLEN (src, 0); i++)
2433 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2434 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2435 emit_move_insn (tmps[i], reg);
2439 /* If we won't be storing directly into memory, protect the real destination
2440 from strange tricks we might play. */
2442 if (GET_CODE (dst) == PARALLEL)
2446 /* We can get a PARALLEL dst if there is a conditional expression in
2447 a return statement. In that case, the dst and src are the same,
2448 so no action is necessary. */
2449 if (rtx_equal_p (dst, src))
2452 /* It is unclear if we can ever reach here, but we may as well handle
2453 it.  Allocate a temporary, and split this into a store/load to/from
2454 the temporary.  */
2456 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2457 emit_group_store (temp, src, ssize);
2458 emit_group_load (dst, temp, ssize);
2461 else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
2463 dst = gen_reg_rtx (GET_MODE (orig_dst));
2464 /* Make life a bit easier for combine. */
2465 emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
2468 /* Process the pieces. */
2469 for (i = start; i < XVECLEN (src, 0); i++)
2471 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2472 enum machine_mode mode = GET_MODE (tmps[i]);
2473 unsigned int bytelen = GET_MODE_SIZE (mode);
2476 /* Handle trailing fragments that run over the size of the struct. */
2477 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2479 if (BYTES_BIG_ENDIAN)
2481 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2482 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2483 tmps[i], 0, OPTAB_WIDEN);
2485 bytelen = ssize - bytepos;
2488 if (GET_CODE (dst) == CONCAT)
2490 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2491 dest = XEXP (dst, 0);
2492 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2494 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2495 dest = XEXP (dst, 1);
2497 else if (bytepos == 0 && XVECLEN (src, 0))
2499 dest = assign_stack_temp (GET_MODE (dest),
2500 GET_MODE_SIZE (GET_MODE (dest)), 0);
2501 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2510 /* Optimize the access just a bit. */
2511 if (GET_CODE (dest) == MEM
2512 && MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode)
2513 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2514 && bytelen == GET_MODE_SIZE (mode))
2515 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2517 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2518 mode, tmps[i], ssize);
2523 /* Copy from the pseudo into the (probable) hard reg. */
2524 if (orig_dst != dst)
2525 emit_move_insn (orig_dst, dst);
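/* Note the symmetry with emit_group_load above: there a big-endian
   trailing fragment is shifted left after extraction; here the same
   fragment is shifted right (ashr) before being stored, undoing that
   justification.  */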
2528 /* Generate code to copy a BLKmode object of TYPE out of a
2529 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2530 is null, a stack temporary is created. TGTBLK is returned.
2532 The primary purpose of this routine is to handle functions
2533 that return BLKmode structures in registers. Some machines
2534 (the PA for example) want to return all small structures
2535 in registers regardless of the structure's alignment. */
2538 copy_blkmode_from_reg (tgtblk, srcreg, type)
2543 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2544 rtx src = NULL, dst = NULL;
2545 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2546 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2550 tgtblk = assign_temp (build_qualified_type (type,
2552 | TYPE_QUAL_CONST)),
2554 preserve_temp_slots (tgtblk);
2557 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2558 into a new pseudo which is a full word. */
2560 if (GET_MODE (srcreg) != BLKmode
2561 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2562 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2564 /* Structures whose size is not a multiple of a word are aligned
2565 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2566 machine, this means we must skip the empty high order bytes when
2567 calculating the bit offset. */
2568 if (BYTES_BIG_ENDIAN
2569 && bytes % UNITS_PER_WORD)
2570 big_endian_correction
2571 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2573 /* Copy the structure BITSIZE bits at a time.
2575 We could probably emit more efficient code for machines which do not use
2576 strict alignment, but it doesn't seem worth the effort at the current
2577 time.  */
2578 for (bitpos = 0, xbitpos = big_endian_correction;
2579 bitpos < bytes * BITS_PER_UNIT;
2580 bitpos += bitsize, xbitpos += bitsize)
2582 /* We need a new source operand each time xbitpos is on a
2583 word boundary and when xbitpos == big_endian_correction
2584 (the first time through). */
2585 if (xbitpos % BITS_PER_WORD == 0
2586 || xbitpos == big_endian_correction)
2587 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2590 /* We need a new destination operand each time bitpos is on
2591 a word boundary.  */
2592 if (bitpos % BITS_PER_WORD == 0)
2593 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2595 /* Use xbitpos for the source extraction (right justified) and
2596 bitpos for the destination store (left justified).  */
2597 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2598 extract_bit_field (src, bitsize,
2599 xbitpos % BITS_PER_WORD, 1,
2600 NULL_RTX, word_mode, word_mode,
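/* Worked example of the correction above, assuming a 32-bit big-endian
   target: for a 6-byte structure, bytes % UNITS_PER_WORD == 2, so
   big_endian_correction is 32 - 2 * 8 == 16, and extraction starts 16
   bits into the first source word, skipping the unused high-order
   bytes of the right-justified value.  */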
2608 /* Add a USE expression for REG to the (possibly empty) list pointed
2609 to by CALL_FUSAGE. REG must denote a hard register. */
2612 use_reg (call_fusage, reg)
2613 rtx *call_fusage, reg;
2615 if (GET_CODE (reg) != REG
2616 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2620 = gen_rtx_EXPR_LIST (VOIDmode,
2621 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2624 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2625 starting at REGNO. All of these registers must be hard registers. */
2628 use_regs (call_fusage, regno, nregs)
2635 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2638 for (i = 0; i < nregs; i++)
2639 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2642 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2643 PARALLEL REGS. This is for calls that pass values in multiple
2644 non-contiguous locations. The Irix 6 ABI has examples of this. */
2647 use_group_regs (call_fusage, regs)
2653 for (i = 0; i < XVECLEN (regs, 0); i++)
2655 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2657 /* A NULL entry means the parameter goes both on the stack and in
2658 registers. This can also be a MEM for targets that pass values
2659 partially on the stack and partially in registers. */
2660 if (reg != 0 && GET_CODE (reg) == REG)
2661 use_reg (call_fusage, reg);
2666 /* Determine whether the LEN bytes generated by CONSTFUN can be
2667 stored to memory using several move instructions. CONSTFUNDATA is
2668 a pointer which will be passed as argument in every CONSTFUN call.
2669 ALIGN is maximum alignment we can assume. Return nonzero if a
2670 call to store_by_pieces should succeed. */
2673 can_store_by_pieces (len, constfun, constfundata, align)
2674 unsigned HOST_WIDE_INT len;
2675 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2679 unsigned HOST_WIDE_INT max_size, l;
2680 HOST_WIDE_INT offset = 0;
2681 enum machine_mode mode, tmode;
2682 enum insn_code icode;
2686 if (! STORE_BY_PIECES_P (len, align))
2689 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2690 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2691 align = MOVE_MAX * BITS_PER_UNIT;
2693 /* We would first store what we can in the largest integer mode, then go to
2694 successively smaller modes. */
2697 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2702 max_size = STORE_MAX_PIECES + 1;
2703 while (max_size > 1)
2705 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2706 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2707 if (GET_MODE_SIZE (tmode) < max_size)
2710 if (mode == VOIDmode)
2713 icode = mov_optab->handlers[(int) mode].insn_code;
2714 if (icode != CODE_FOR_nothing
2715 && align >= GET_MODE_ALIGNMENT (mode))
2717 unsigned int size = GET_MODE_SIZE (mode);
2724 cst = (*constfun) (constfundata, offset, mode);
2725 if (!LEGITIMATE_CONSTANT_P (cst))
2735 max_size = GET_MODE_SIZE (mode);
2738 /* The code above should have handled everything. */
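/* Worked example: with STORE_MAX_PIECES == 8, word-sized alignment,
   and LEN == 11, the loop above tries DImode, SImode, HImode and
   QImode in turn and covers the 11 bytes as 8 + 2 + 1; each piece is
   accepted only if CONSTFUN yields a LEGITIMATE_CONSTANT_P value in
   that mode.  */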
2746 /* Generate several move instructions to store LEN bytes generated by
2747 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2748 pointer which will be passed as argument in every CONSTFUN call.
2749 ALIGN is maximum alignment we can assume. */
2752 store_by_pieces (to, len, constfun, constfundata, align)
2754 unsigned HOST_WIDE_INT len;
2755 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2759 struct store_by_pieces data;
2761 if (! STORE_BY_PIECES_P (len, align))
2763 to = protect_from_queue (to, 1);
2764 data.constfun = constfun;
2765 data.constfundata = constfundata;
2768 store_by_pieces_1 (&data, align);
2771 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2772 rtx with BLKmode). The caller must pass TO through protect_from_queue
2773 before calling. ALIGN is maximum alignment we can assume. */
2776 clear_by_pieces (to, len, align)
2778 unsigned HOST_WIDE_INT len;
2781 struct store_by_pieces data;
2783 data.constfun = clear_by_pieces_1;
2784 data.constfundata = NULL;
2787 store_by_pieces_1 (&data, align);
2790 /* Callback routine for clear_by_pieces.
2791 Return const0_rtx unconditionally. */
2794 clear_by_pieces_1 (data, offset, mode)
2795 PTR data ATTRIBUTE_UNUSED;
2796 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2797 enum machine_mode mode ATTRIBUTE_UNUSED;
2802 /* Subroutine of clear_by_pieces and store_by_pieces.
2803 Generate several move instructions to store LEN bytes of block TO. (A MEM
2804 rtx with BLKmode). The caller must pass TO through protect_from_queue
2805 before calling. ALIGN is maximum alignment we can assume. */
2808 store_by_pieces_1 (data, align)
2809 struct store_by_pieces *data;
2812 rtx to_addr = XEXP (data->to, 0);
2813 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
2814 enum machine_mode mode = VOIDmode, tmode;
2815 enum insn_code icode;
2818 data->to_addr = to_addr;
2820 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2821 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2823 data->explicit_inc_to = 0;
2825 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2827 data->offset = data->len;
2829 /* If storing requires more than two move insns,
2830 copy addresses to registers (to make displacements shorter)
2831 and use post-increment if available. */
2832 if (!data->autinc_to
2833 && move_by_pieces_ninsns (data->len, align) > 2)
2835 /* Determine the main mode we'll be using. */
2836 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2837 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2838 if (GET_MODE_SIZE (tmode) < max_size)
2841 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2843 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2844 data->autinc_to = 1;
2845 data->explicit_inc_to = -1;
2848 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2849 && ! data->autinc_to)
2851 data->to_addr = copy_addr_to_reg (to_addr);
2852 data->autinc_to = 1;
2853 data->explicit_inc_to = 1;
2856 if ( !data->autinc_to && CONSTANT_P (to_addr))
2857 data->to_addr = copy_addr_to_reg (to_addr);
2860 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2861 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2862 align = MOVE_MAX * BITS_PER_UNIT;
2864 /* First store what we can in the largest integer mode, then go to
2865 successively smaller modes. */
2867 while (max_size > 1)
2869 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2870 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2871 if (GET_MODE_SIZE (tmode) < max_size)
2874 if (mode == VOIDmode)
2877 icode = mov_optab->handlers[(int) mode].insn_code;
2878 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2879 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2881 max_size = GET_MODE_SIZE (mode);
2884 /* The code above should have handled everything. */
2889 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2890 with move instructions for mode MODE. GENFUN is the gen_... function
2891 to make a move insn for that mode. DATA has all the other info. */
2894 store_by_pieces_2 (genfun, mode, data)
2895 rtx (*genfun) PARAMS ((rtx, ...));
2896 enum machine_mode mode;
2897 struct store_by_pieces *data;
2899 unsigned int size = GET_MODE_SIZE (mode);
2902 while (data->len >= size)
2905 data->offset -= size;
2907 if (data->autinc_to)
2908 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2911 to1 = adjust_address (data->to, mode, data->offset);
2913 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2914 emit_insn (gen_add2_insn (data->to_addr,
2915 GEN_INT (-(HOST_WIDE_INT) size)));
2917 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2918 emit_insn ((*genfun) (to1, cst));
2920 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2921 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2923 if (! data->reverse)
2924 data->offset += size;
2930 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2931 its length in bytes. */
2934 clear_storage (object, size)
2939 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2940 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2942 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2943 just move a zero. Otherwise, do this a piece at a time. */
2944 if (GET_MODE (object) != BLKmode
2945 && GET_CODE (size) == CONST_INT
2946 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
2947 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2950 object = protect_from_queue (object, 1);
2951 size = protect_from_queue (size, 0);
2953 if (GET_CODE (size) == CONST_INT
2954 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2955 clear_by_pieces (object, INTVAL (size), align);
2956 else if (clear_storage_via_clrstr (object, size, align))
2959 retval = clear_storage_via_libcall (object, size);
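/* Usage sketch: a caller clearing a 16-byte BLKmode MEM would write

     clear_storage (object, GEN_INT (16));

   the code above then picks clear_by_pieces, a clrstr pattern, or the
   memset/bzero libcall, in that order of preference; a non-BLKmode
   object whose size matches its mode is simply assigned CONST0_RTX
   instead.  */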
2965 /* A subroutine of clear_storage. Expand a clrstr pattern;
2966 return true if successful. */
2969 clear_storage_via_clrstr (object, size, align)
2973 /* Try the most limited insn first, because there's no point
2974 including more than one in the machine description unless
2975 the more limited one has some advantage. */
2977 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2978 enum machine_mode mode;
2980 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2981 mode = GET_MODE_WIDER_MODE (mode))
2983 enum insn_code code = clrstr_optab[(int) mode];
2984 insn_operand_predicate_fn pred;
2986 if (code != CODE_FOR_nothing
2987 /* We don't need MODE to be narrower than
2988 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2989 the mode mask, as it is returned by the macro, it will
2990 definitely be less than the actual mode mask. */
2991 && ((GET_CODE (size) == CONST_INT
2992 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2993 <= (GET_MODE_MASK (mode) >> 1)))
2994 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2995 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2996 || (*pred) (object, BLKmode))
2997 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2998 || (*pred) (opalign, VOIDmode)))
3001 rtx last = get_last_insn ();
3004 op1 = convert_to_mode (mode, size, 1);
3005 pred = insn_data[(int) code].operand[1].predicate;
3006 if (pred != 0 && ! (*pred) (op1, mode))
3007 op1 = copy_to_mode_reg (mode, op1);
3009 pat = GEN_FCN ((int) code) (object, op1, opalign);
3016 delete_insns_since (last);
3023 /* A subroutine of clear_storage. Expand a call to memset or bzero.
3024 Return the return value of memset, 0 otherwise. */
3027 clear_storage_via_libcall (object, size)
3030 tree call_expr, arg_list, fn, object_tree, size_tree;
3031 enum machine_mode size_mode;
3034 /* OBJECT or SIZE may have been passed through protect_from_queue.
3036 It is unsafe to save the value generated by protect_from_queue
3037 and reuse it later. Consider what happens if emit_queue is
3038 called before the return value from protect_from_queue is used.
3040 Expansion of the CALL_EXPR below will call emit_queue before
3041 we are finished emitting RTL for argument setup. So if we are
3042 not careful we could get the wrong value for an argument.
3044 To avoid this problem we go ahead and emit code to copy OBJECT
3045 and SIZE into new pseudos. We can then place those new pseudos
3046 into an RTL_EXPR and use them later, even after a call to
3049 Note this is not strictly needed for library calls since they
3050 do not call emit_queue before loading their arguments. However,
3051 we may need to have library calls call emit_queue in the future
3052 since failing to do so could cause problems for targets which
3053 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
3055 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
3057 if (TARGET_MEM_FUNCTIONS)
3058 size_mode = TYPE_MODE (sizetype);
3060 size_mode = TYPE_MODE (unsigned_type_node);
3061 size = convert_to_mode (size_mode, size, 1);
3062 size = copy_to_mode_reg (size_mode, size);
3064 /* It is incorrect to use the libcall calling conventions to call
3065 memset in this context. This could be a user call to memset and
3066 the user may wish to examine the return value from memset. For
3067 targets where libcalls and normal calls have different conventions
3068 for returning pointers, we could end up generating incorrect code.
3070 For convenience, we generate the call to bzero this way as well. */
3072 object_tree = make_tree (ptr_type_node, object);
3073 if (TARGET_MEM_FUNCTIONS)
3074 size_tree = make_tree (sizetype, size);
3076 size_tree = make_tree (unsigned_type_node, size);
3078 fn = clear_storage_libcall_fn (true);
3079 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
3080 if (TARGET_MEM_FUNCTIONS)
3081 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
3082 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
3084 /* Now we have to build up the CALL_EXPR itself. */
3085 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
3086 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
3087 call_expr, arg_list, NULL_TREE);
3088 TREE_SIDE_EFFECTS (call_expr) = 1;
3090 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
3092 /* If we are initializing a readonly value, show the above call
3093 clobbered it. Otherwise, a load from it may erroneously be
3094 hoisted from a loop. */
3095 if (RTX_UNCHANGING_P (object))
3096 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
3098 return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
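/* In other words, the CALL_EXPR built above corresponds to

     memset (object, 0, size)     if TARGET_MEM_FUNCTIONS
     bzero (object, size)         otherwise

   which is why integer_zero_node is consed onto the argument list
   only in the memset case.  */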
3101 /* A subroutine of clear_storage_via_libcall. Create the tree node
3102 for the function we use for block clears. The first time FOR_CALL
3103 is true, we call assemble_external. */
3105 static GTY(()) tree block_clear_fn;
3108 init_block_clear_fn (asmspec)
3109 const char *asmspec;
3111 if (!block_clear_fn)
3115 if (TARGET_MEM_FUNCTIONS)
3117 fn = get_identifier ("memset");
3118 args = build_function_type_list (ptr_type_node, ptr_type_node,
3119 integer_type_node, sizetype,
3124 fn = get_identifier ("bzero");
3125 args = build_function_type_list (void_type_node, ptr_type_node,
3126 unsigned_type_node, NULL_TREE);
3129 fn = build_decl (FUNCTION_DECL, fn, args);
3130 DECL_EXTERNAL (fn) = 1;
3131 TREE_PUBLIC (fn) = 1;
3132 DECL_ARTIFICIAL (fn) = 1;
3133 TREE_NOTHROW (fn) = 1;
3135 block_clear_fn = fn;
3140 SET_DECL_RTL (block_clear_fn, NULL_RTX);
3141 SET_DECL_ASSEMBLER_NAME (block_clear_fn, get_identifier (asmspec));
3146 clear_storage_libcall_fn (for_call)
3149 static bool emitted_extern;
3151 if (!block_clear_fn)
3152 init_block_clear_fn (NULL);
3154 if (for_call && !emitted_extern)
3156 emitted_extern = true;
3157 make_decl_rtl (block_clear_fn, NULL);
3158 assemble_external (block_clear_fn);
3161 return block_clear_fn;
3164 /* Generate code to copy Y into X.
3165 Both Y and X must have the same mode, except that
3166 Y can be a constant with VOIDmode.
3167 This mode cannot be BLKmode; use emit_block_move for that.
3169 Return the last instruction emitted. */
3172 emit_move_insn (x, y)
3175 enum machine_mode mode = GET_MODE (x);
3176 rtx y_cst = NULL_RTX;
3179 x = protect_from_queue (x, 1);
3180 y = protect_from_queue (y, 0);
3182 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
3185 /* Never force constant_p_rtx to memory. */
3186 if (GET_CODE (y) == CONSTANT_P_RTX)
3188 else if (CONSTANT_P (y))
3191 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3192 && (last_insn = compress_float_constant (x, y)))
3195 if (!LEGITIMATE_CONSTANT_P (y))
3198 y = force_const_mem (mode, y);
3200 /* If the target's cannot_force_const_mem prevented the spill,
3201 assume that the target's move expanders will also take care
3202 of the non-legitimate constant. */
3208 /* If X or Y are memory references, verify that their addresses are valid
3209 for the machine.  */
3210 if (GET_CODE (x) == MEM
3211 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3212 && ! push_operand (x, GET_MODE (x)))
3214 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
3215 x = validize_mem (x);
3217 if (GET_CODE (y) == MEM
3218 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3220 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
3221 y = validize_mem (y);
3223 if (mode == BLKmode)
3226 last_insn = emit_move_insn_1 (x, y);
3228 if (y_cst && GET_CODE (x) == REG)
3229 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3234 /* Low level part of emit_move_insn.
3235 Called just like emit_move_insn, but assumes X and Y
3236 are basically valid. */
3239 emit_move_insn_1 (x, y)
3242 enum machine_mode mode = GET_MODE (x);
3243 enum machine_mode submode;
3244 enum mode_class class = GET_MODE_CLASS (mode);
3246 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
3249 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
3251 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
3253 /* Expand complex moves by moving real part and imag part, if possible. */
3254 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
3255 && BLKmode != (submode = GET_MODE_INNER (mode))
3256 && (mov_optab->handlers[(int) submode].insn_code
3257 != CODE_FOR_nothing))
3259 /* Don't split destination if it is a stack push. */
3260 int stack = push_operand (x, GET_MODE (x));
3262 #ifdef PUSH_ROUNDING
3263 /* In case we output to the stack, but the size is smaller than the
3264 machine can push exactly, we need to use move instructions.  */
3266 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
3267 != GET_MODE_SIZE (submode)))
3270 HOST_WIDE_INT offset1, offset2;
3272 /* Do not use anti_adjust_stack, since we don't want to update
3273 stack_pointer_delta. */
3274 temp = expand_binop (Pmode,
3275 #ifdef STACK_GROWS_DOWNWARD
3283 (GET_MODE_SIZE (GET_MODE (x)))),
3284 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3286 if (temp != stack_pointer_rtx)
3287 emit_move_insn (stack_pointer_rtx, temp);
3289 #ifdef STACK_GROWS_DOWNWARD
3291 offset2 = GET_MODE_SIZE (submode);
3293 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
3294 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
3295 + GET_MODE_SIZE (submode));
3298 emit_move_insn (change_address (x, submode,
3299 gen_rtx_PLUS (Pmode,
3301 GEN_INT (offset1))),
3302 gen_realpart (submode, y));
3303 emit_move_insn (change_address (x, submode,
3304 gen_rtx_PLUS (Pmode,
3306 GEN_INT (offset2))),
3307 gen_imagpart (submode, y));
3311 /* If this is a stack, push the highpart first, so it
3312 will be in the argument order.
3314 In that case, change_address is used only to convert
3315 the mode, not to change the address. */
3318 /* Note that the real part always precedes the imag part in memory
3319 regardless of machine's endianness. */
3320 #ifdef STACK_GROWS_DOWNWARD
3321 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3322 (gen_rtx_MEM (submode, XEXP (x, 0)),
3323 gen_imagpart (submode, y)));
3324 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3325 (gen_rtx_MEM (submode, XEXP (x, 0)),
3326 gen_realpart (submode, y)));
3328 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3329 (gen_rtx_MEM (submode, XEXP (x, 0)),
3330 gen_realpart (submode, y)));
3331 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3332 (gen_rtx_MEM (submode, XEXP (x, 0)),
3333 gen_imagpart (submode, y)));
3338 rtx realpart_x, realpart_y;
3339 rtx imagpart_x, imagpart_y;
3341 /* If this is a complex value with each part being smaller than a
3342 word, the usual calling sequence will likely pack the pieces into
3343 a single register. Unfortunately, SUBREG of hard registers only
3344 deals in terms of words, so we have a problem converting input
3345 arguments to the CONCAT of two registers that is used elsewhere
3346 for complex values. If this is before reload, we can copy it into
3347 memory and reload. FIXME, we should see about using extract and
3348 insert on integer registers, but complex short and complex char
3349 variables should be rarely used. */
3350 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
3351 && (reload_in_progress | reload_completed) == 0)
3354 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
3356 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
3358 if (packed_dest_p || packed_src_p)
3360 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
3361 ? MODE_FLOAT : MODE_INT);
3363 enum machine_mode reg_mode
3364 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
3366 if (reg_mode != BLKmode)
3368 rtx mem = assign_stack_temp (reg_mode,
3369 GET_MODE_SIZE (mode), 0);
3370 rtx cmem = adjust_address (mem, mode, 0);
3373 = N_("function using short complex types cannot be inline");
3377 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
3379 emit_move_insn_1 (cmem, y);
3380 return emit_move_insn_1 (sreg, mem);
3384 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
3386 emit_move_insn_1 (mem, sreg);
3387 return emit_move_insn_1 (x, cmem);
3393 realpart_x = gen_realpart (submode, x);
3394 realpart_y = gen_realpart (submode, y);
3395 imagpart_x = gen_imagpart (submode, x);
3396 imagpart_y = gen_imagpart (submode, y);
3398 /* Show the output dies here. This is necessary for SUBREGs
3399 of pseudos since we cannot track their lifetimes correctly;
3400 hard regs shouldn't appear here except as return values.
3401 We never want to emit such a clobber after reload. */
3403 && ! (reload_in_progress || reload_completed)
3404 && (GET_CODE (realpart_x) == SUBREG
3405 || GET_CODE (imagpart_x) == SUBREG))
3406 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3408 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3409 (realpart_x, realpart_y));
3410 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3411 (imagpart_x, imagpart_y));
3414 return get_last_insn ();
3417 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
3418 find a mode to do it in. If we have a movcc, use it. Otherwise,
3419 find the MODE_INT mode of the same width. */
3420 else if (GET_MODE_CLASS (mode) == MODE_CC
3421 && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
3423 enum insn_code insn_code;
3424 enum machine_mode tmode = VOIDmode;
3428 && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
3431 for (tmode = QImode; tmode != VOIDmode;
3432 tmode = GET_MODE_WIDER_MODE (tmode))
3433 if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
3436 if (tmode == VOIDmode)
3439 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
3440 may call change_address which is not appropriate if we were
3441 called when a reload was in progress. We don't have to worry
3442 about changing the address since the size in bytes is supposed to
3443 be the same. Copy the MEM to change the mode and move any
3444 substitutions from the old MEM to the new one. */
3446 if (reload_in_progress)
3448 x = gen_lowpart_common (tmode, x1);
3449 if (x == 0 && GET_CODE (x1) == MEM)
3451 x = adjust_address_nv (x1, tmode, 0);
3452 copy_replacements (x1, x);
3455 y = gen_lowpart_common (tmode, y1);
3456 if (y == 0 && GET_CODE (y1) == MEM)
3458 y = adjust_address_nv (y1, tmode, 0);
3459 copy_replacements (y1, y);
3464 x = gen_lowpart (tmode, x);
3465 y = gen_lowpart (tmode, y);
3468 insn_code = mov_optab->handlers[(int) tmode].insn_code;
3469 return emit_insn (GEN_FCN (insn_code) (x, y));
3472 /* This will handle any multi-word or full-word mode that lacks a move_insn
3473 pattern. However, you will get better code if you define such patterns,
3474 even if they must turn into multiple assembler instructions. */
3475 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
3482 #ifdef PUSH_ROUNDING
3484 /* If X is a push on the stack, do the push now and replace
3485 X with a reference to the stack pointer. */
3486 if (push_operand (x, GET_MODE (x)))
3491 /* Do not use anti_adjust_stack, since we don't want to update
3492 stack_pointer_delta. */
3493 temp = expand_binop (Pmode,
3494 #ifdef STACK_GROWS_DOWNWARD
3502 (GET_MODE_SIZE (GET_MODE (x)))),
3503 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3505 if (temp != stack_pointer_rtx)
3506 emit_move_insn (stack_pointer_rtx, temp);
3508 code = GET_CODE (XEXP (x, 0));
3510 /* Just hope that small offsets off SP are OK. */
3511 if (code == POST_INC)
3512 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3513 GEN_INT (-((HOST_WIDE_INT)
3514 GET_MODE_SIZE (GET_MODE (x)))));
3515 else if (code == POST_DEC)
3516 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3517 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3519 temp = stack_pointer_rtx;
3521 x = change_address (x, VOIDmode, temp);
3525 /* If we are in reload, see if either operand is a MEM whose address
3526 is scheduled for replacement. */
3527 if (reload_in_progress && GET_CODE (x) == MEM
3528 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3529 x = replace_equiv_address_nv (x, inner);
3530 if (reload_in_progress && GET_CODE (y) == MEM
3531 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3532 y = replace_equiv_address_nv (y, inner);
3538 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3541 rtx xpart = operand_subword (x, i, 1, mode);
3542 rtx ypart = operand_subword (y, i, 1, mode);
3544 /* If we can't get a part of Y, put Y into memory if it is a
3545 constant. Otherwise, force it into a register. If we still
3546 can't get a part of Y, abort. */
3547 if (ypart == 0 && CONSTANT_P (y))
3549 y = force_const_mem (mode, y);
3550 ypart = operand_subword (y, i, 1, mode);
3552 else if (ypart == 0)
3553 ypart = operand_subword_force (y, i, mode);
3555 if (xpart == 0 || ypart == 0)
3558 need_clobber |= (GET_CODE (xpart) == SUBREG);
3560 last_insn = emit_move_insn (xpart, ypart);
3566 /* Show the output dies here. This is necessary for SUBREGs
3567 of pseudos since we cannot track their lifetimes correctly;
3568 hard regs shouldn't appear here except as return values.
3569 We never want to emit such a clobber after reload. */
3571 && ! (reload_in_progress || reload_completed)
3572 && need_clobber != 0)
3573 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
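/* Worked example: a DImode move on a 32-bit target that lacks a movdi
   pattern reaches the multi-word loop above and is emitted as two
   SImode moves of the subwords, preceded by the CLOBBER when either
   destination subword is a SUBREG of a pseudo.  */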
3583 /* If Y is representable exactly in a narrower mode, and the target can
3584 perform the extension directly from constant or memory, then emit the
3585 move as an extension. */
3588 compress_float_constant (x, y)
3591 enum machine_mode dstmode = GET_MODE (x);
3592 enum machine_mode orig_srcmode = GET_MODE (y);
3593 enum machine_mode srcmode;
3596 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3598 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3599 srcmode != orig_srcmode;
3600 srcmode = GET_MODE_WIDER_MODE (srcmode))
3603 rtx trunc_y, last_insn;
3605 /* Skip if the target can't extend this way. */
3606 ic = can_extend_p (dstmode, srcmode, 0);
3607 if (ic == CODE_FOR_nothing)
3610 /* Skip if the narrowed value isn't exact. */
3611 if (! exact_real_truncate (srcmode, &r))
3614 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3616 if (LEGITIMATE_CONSTANT_P (trunc_y))
3618 /* Skip if the target needs extra instructions to perform
3619 the extension.  */
3620 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3623 else if (float_extend_from_mem[dstmode][srcmode])
3624 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3628 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3629 last_insn = get_last_insn ();
3631 if (GET_CODE (x) == REG)
3632 REG_NOTES (last_insn)
3633 = gen_rtx_EXPR_LIST (REG_EQUAL, y, REG_NOTES (last_insn));
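/* Worked example: moving the DFmode constant 1.0 into a register.
   1.0 truncates to SFmode exactly, so on a target providing
   extendsfdf2 the move is emitted as an SF-to-DF extension of the
   narrower constant, which is usually cheaper to materialize than a
   full double-precision immediate.  */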
3641 /* Pushing data onto the stack. */
3643 /* Push a block of length SIZE (perhaps variable)
3644 and return an rtx to address the beginning of the block.
3645 Note that it is not possible for the value returned to be a QUEUED.
3646 The value may be virtual_outgoing_args_rtx.
3648 EXTRA is the number of bytes of padding to push in addition to SIZE.
3649 BELOW nonzero means this padding comes at low addresses;
3650 otherwise, the padding comes at high addresses. */
3653 push_block (size, extra, below)
3659 size = convert_modes (Pmode, ptr_mode, size, 1);
3660 if (CONSTANT_P (size))
3661 anti_adjust_stack (plus_constant (size, extra));
3662 else if (GET_CODE (size) == REG && extra == 0)
3663 anti_adjust_stack (size);
3666 temp = copy_to_mode_reg (Pmode, size);
3668 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3669 temp, 0, OPTAB_LIB_WIDEN);
3670 anti_adjust_stack (temp);
3673 #ifndef STACK_GROWS_DOWNWARD
3679 temp = virtual_outgoing_args_rtx;
3680 if (extra != 0 && below)
3681 temp = plus_constant (temp, extra);
3685 if (GET_CODE (size) == CONST_INT)
3686 temp = plus_constant (virtual_outgoing_args_rtx,
3687 -INTVAL (size) - (below ? 0 : extra));
3688 else if (extra != 0 && !below)
3689 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3690 negate_rtx (Pmode, plus_constant (size, extra)));
3692 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3693 negate_rtx (Pmode, size));
3696 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
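/* Worked example, assuming STACK_GROWS_DOWNWARD: for
   SIZE == (const_int 16), EXTRA == 4 and BELOW == 0, the stack is
   adjusted by 20 bytes and the value returned addresses
   virtual_outgoing_args_rtx - 20, leaving the 4 padding bytes above
   the 16-byte block.  */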
3699 #ifdef PUSH_ROUNDING
3701 /* Emit single push insn. */
3704 emit_single_push_insn (mode, x, type)
3706 enum machine_mode mode;
3710 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3712 enum insn_code icode;
3713 insn_operand_predicate_fn pred;
3715 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3716 /* If there is a push pattern, use it.  Otherwise try the old way of
3717 throwing a MEM representing the push operation to the move expander.  */
3718 icode = push_optab->handlers[(int) mode].insn_code;
3719 if (icode != CODE_FOR_nothing)
3721 if (((pred = insn_data[(int) icode].operand[0].predicate)
3722 && !((*pred) (x, mode))))
3723 x = force_reg (mode, x);
3724 emit_insn (GEN_FCN (icode) (x));
3727 if (GET_MODE_SIZE (mode) == rounded_size)
3728 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3731 #ifdef STACK_GROWS_DOWNWARD
3732 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3733 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3735 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3736 GEN_INT (rounded_size));
3738 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3741 dest = gen_rtx_MEM (mode, dest_addr);
3745 set_mem_attributes (dest, type, 1);
3747 if (flag_optimize_sibling_calls)
3748 /* Function incoming arguments may overlap with sibling call
3749 outgoing arguments and we cannot allow reordering of reads
3750 from function arguments with stores to outgoing arguments
3751 of sibling calls. */
3752 set_mem_alias_set (dest, 0);
3754 emit_move_insn (dest, x);
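/* Worked example: pushing an HImode value when PUSH_ROUNDING rounds
   2 bytes up to 4.  Since GET_MODE_SIZE (mode) != rounded_size, the
   code above builds

     (mem:HI (pre_modify (reg sp) (plus (reg sp) (const_int -4))))

   on a downward-growing stack, keeping the stack pointer 4-byte
   aligned.  */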
3758 /* Generate code to push X onto the stack, assuming it has mode MODE and
3759 type TYPE.
3760 MODE is redundant except when X is a CONST_INT (since they don't
3761 carry mode info).
3762 SIZE is an rtx for the size of data to be copied (in bytes),
3763 needed only if X is BLKmode.
3765 ALIGN (in bits) is maximum alignment we can assume.
3767 If PARTIAL and REG are both nonzero, then copy that many of the first
3768 words of X into registers starting with REG, and push the rest of X.
3769 The amount of space pushed is decreased by PARTIAL words,
3770 rounded *down* to a multiple of PARM_BOUNDARY.
3771 REG must be a hard register in this case.
3772 If REG is zero but PARTIAL is not, take all other actions for an
3773 argument partially in registers, but do not actually load any
3774 registers.
3776 EXTRA is the amount in bytes of extra space to leave next to this arg.
3777 This is ignored if an argument block has already been allocated.
3779 On a machine that lacks real push insns, ARGS_ADDR is the address of
3780 the bottom of the argument block for this call. We use indexing off there
3781 to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
3782 argument block has not been preallocated.
3784 ARGS_SO_FAR is the size of args previously pushed for this call.
3786 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3787 for arguments passed in registers. If nonzero, it will be the number
3788 of bytes required. */
3791 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3792 args_addr, args_so_far, reg_parm_stack_space,
3795 enum machine_mode mode;
3804 int reg_parm_stack_space;
3808 enum direction stack_direction
3809 #ifdef STACK_GROWS_DOWNWARD
3815 /* Decide where to pad the argument: `downward' for below,
3816 `upward' for above, or `none' for don't pad it.
3817 Default is below for small data on big-endian machines; else above.  */
3818 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3820 /* Invert direction if stack is post-decrement.  */
3822 if (STACK_PUSH_CODE == POST_DEC)
3823 if (where_pad != none)
3824 where_pad = (where_pad == downward ? upward : downward);
3826 xinner = x = protect_from_queue (x, 0);
3828 if (mode == BLKmode)
3830 /* Copy a block into the stack, entirely or partially. */
3833 int used = partial * UNITS_PER_WORD;
3834 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3842 /* USED is now the # of bytes we need not copy to the stack
3843 because registers will take care of them. */
3846 xinner = adjust_address (xinner, BLKmode, used);
3848 /* If the partial register-part of the arg counts in its stack size,
3849 skip the part of stack space corresponding to the registers.
3850 Otherwise, start copying to the beginning of the stack space,
3851 by setting SKIP to 0. */
3852 skip = (reg_parm_stack_space == 0) ? 0 : used;
3854 #ifdef PUSH_ROUNDING
3855 /* Do it with several push insns if that doesn't take lots of insns
3856 and if there is no difficulty with push insns that skip bytes
3857 on the stack for alignment purposes. */
3860 && GET_CODE (size) == CONST_INT
3862 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3863 /* Here we avoid the case of a structure whose weak alignment
3864 forces many pushes of a small amount of data,
3865 and such small pushes do rounding that causes trouble. */
3866 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3867 || align >= BIGGEST_ALIGNMENT
3868 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3869 == (align / BITS_PER_UNIT)))
3870 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3872 /* Push padding now if padding above and stack grows down,
3873 or if padding below and stack grows up.
3874 But if space already allocated, this has already been done. */
3875 if (extra && args_addr == 0
3876 && where_pad != none && where_pad != stack_direction)
3877 anti_adjust_stack (GEN_INT (extra));
3879 move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
3882 #endif /* PUSH_ROUNDING */
3886 /* Otherwise make space on the stack and copy the data
3887 to the address of that space. */
3889 /* Deduct words put into registers from the size we must copy. */
3892 if (GET_CODE (size) == CONST_INT)
3893 size = GEN_INT (INTVAL (size) - used);
3895 size = expand_binop (GET_MODE (size), sub_optab, size,
3896 GEN_INT (used), NULL_RTX, 0,
3900 /* Get the address of the stack space.
3901 In this case, we do not deal with EXTRA separately.
3902 A single stack adjust will do. */
3905 temp = push_block (size, extra, where_pad == downward);
3908 else if (GET_CODE (args_so_far) == CONST_INT)
3909 temp = memory_address (BLKmode,
3910 plus_constant (args_addr,
3911 skip + INTVAL (args_so_far)));
3913 temp = memory_address (BLKmode,
3914 plus_constant (gen_rtx_PLUS (Pmode,
3919 if (!ACCUMULATE_OUTGOING_ARGS)
3921 /* If the source is referenced relative to the stack pointer,
3922 copy it to another register to stabilize it. We do not need
3923 to do this if we know that we won't be changing sp. */
3925 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3926 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3927 temp = copy_to_reg (temp);
3930 target = gen_rtx_MEM (BLKmode, temp);
3934 set_mem_attributes (target, type, 1);
3935 /* Function incoming arguments may overlap with sibling call
3936 outgoing arguments and we cannot allow reordering of reads
3937 from function arguments with stores to outgoing arguments
3938 of sibling calls. */
3939 set_mem_alias_set (target, 0);
3942 /* ALIGN may well be better aligned than TYPE, e.g. due to
3943 PARM_BOUNDARY. Assume the caller isn't lying. */
3944 set_mem_align (target, align);
3946 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3949 else if (partial > 0)
3951 /* Scalar partly in registers. */
3953 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3956 /* # words of start of argument
3957 that we must make space for but need not store. */
3958 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3959 int args_offset = INTVAL (args_so_far);
3962 /* Push padding now if padding above and stack grows down,
3963 or if padding below and stack grows up.
3964 But if space already allocated, this has already been done. */
3965 if (extra && args_addr == 0
3966 && where_pad != none && where_pad != stack_direction)
3967 anti_adjust_stack (GEN_INT (extra));
3969 /* If we make space by pushing it, we might as well push
3970 the real data. Otherwise, we can leave OFFSET nonzero
3971 and leave the space uninitialized. */
3975 /* Now NOT_STACK gets the number of words that we don't need to
3976 allocate on the stack. */
3977 not_stack = partial - offset;
3979 /* If the partial register-part of the arg counts in its stack size,
3980 skip the part of stack space corresponding to the registers.
3981 Otherwise, start copying to the beginning of the stack space,
3982 by setting SKIP to 0. */
3983 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3985 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3986 x = validize_mem (force_const_mem (mode, x));
3988 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3989 SUBREGs of such registers are not allowed. */
3990 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3991 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3992 x = copy_to_reg (x);
3994 /* Loop over all the words allocated on the stack for this arg. */
3995 /* We can do it by words, because any scalar bigger than a word
3996 has a size a multiple of a word. */
3997 #ifndef PUSH_ARGS_REVERSED
3998 for (i = not_stack; i < size; i++)
4000 for (i = size - 1; i >= not_stack; i--)
4002 if (i >= not_stack + offset)
4003 emit_push_insn (operand_subword_force (x, i, mode),
4004 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
4006 GEN_INT (args_offset + ((i - not_stack + skip)
4008 reg_parm_stack_space, alignment_pad);
4015 /* Push padding now if padding above and stack grows down,
4016 or if padding below and stack grows up.
4017 But if space already allocated, this has already been done. */
4018 if (extra && args_addr == 0
4019 && where_pad != none && where_pad != stack_direction)
4020 anti_adjust_stack (GEN_INT (extra));
4022 #ifdef PUSH_ROUNDING
4023 if (args_addr == 0 && PUSH_ARGS)
4024 emit_single_push_insn (mode, x, type);
4028 if (GET_CODE (args_so_far) == CONST_INT)
4030 = memory_address (mode,
4031 plus_constant (args_addr,
4032 INTVAL (args_so_far)));
4034 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
4036 dest = gen_rtx_MEM (mode, addr);
4039 set_mem_attributes (dest, type, 1);
4040 /* Function incoming arguments may overlap with sibling call
4041 outgoing arguments and we cannot allow reordering of reads
4042 from function arguments with stores to outgoing arguments
4043 of sibling calls. */
4044 set_mem_alias_set (dest, 0);
4047 emit_move_insn (dest, x);
4051 /* If part should go in registers, copy that part
4052 into the appropriate registers. Do this now, at the end,
4053 since mem-to-mem copies above may do function calls. */
4054 if (partial > 0 && reg != 0)
4056 /* Handle calls that pass values in multiple non-contiguous locations.
4057 The Irix 6 ABI has examples of this. */
4058 if (GET_CODE (reg) == PARALLEL)
4059 emit_group_load (reg, x, -1); /* ??? size? */
4061 move_block_to_reg (REGNO (reg), x, partial, mode);
4064 if (extra && args_addr == 0 && where_pad == stack_direction)
4065 anti_adjust_stack (GEN_INT (extra));
4067 if (alignment_pad && args_addr == 0)
4068 anti_adjust_stack (alignment_pad);
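/* Worked example: a 12-byte BLKmode argument with PARTIAL == 2 on a
   32-bit target.  USED == 8 bytes are covered by registers, so only
   the final 4 bytes are pushed (or stored into the preallocated
   block), and the register half is loaded at the very end because the
   memory-to-memory copy above may involve function calls.  */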
4071 /* Return X if X can be used as a subtarget in a sequence of arithmetic
4072 operations.  */
4079 /* Only registers can be subtargets. */
4080 || GET_CODE (x) != REG
4081 /* If the register is readonly, it can't be set more than once. */
4082 || RTX_UNCHANGING_P (x)
4083 /* Don't use hard regs to avoid extending their life. */
4084 || REGNO (x) < FIRST_PSEUDO_REGISTER
4085 /* Avoid subtargets inside loops,
4086 since they hide some invariant expressions. */
4087 || preserve_subexpressions_p ())
4091 /* Expand an assignment that stores the value of FROM into TO.
4092 If WANT_VALUE is nonzero, return an rtx for the value of TO.
4093 (This may contain a QUEUED rtx;
4094 if the value is constant, this rtx is a constant.)
4095 Otherwise, the returned value is NULL_RTX.
4097 SUGGEST_REG is no longer actually used.
4098 It used to mean, copy the value through a register
4099 and return that register, if that is possible.
4100 We now use WANT_VALUE to decide whether to do this. */
4103 expand_assignment (to, from, want_value, suggest_reg)
4106 int suggest_reg ATTRIBUTE_UNUSED;
4111 /* Don't crash if the lhs of the assignment was erroneous. */
4113 if (TREE_CODE (to) == ERROR_MARK)
4115 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
4116 return want_value ? result : NULL_RTX;
4119 /* Assignment of a structure component needs special treatment
4120 if the structure component's rtx is not simply a MEM.
4121 Assignment of an array element at a constant index, and assignment of
4122 an array element in an unaligned packed structure field, has the same
4123 problem.  */
4125 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
4126 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
4127 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4129 enum machine_mode mode1;
4130 HOST_WIDE_INT bitsize, bitpos;
4138 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4139 &unsignedp, &volatilep);
4141 /* If we are going to use store_bit_field and extract_bit_field,
4142 make sure to_rtx will be safe for multiple use. */
4144 if (mode1 == VOIDmode && want_value)
4145 tem = stabilize_reference (tem);
4147 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
4151 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4153 if (GET_CODE (to_rtx) != MEM)
4156 #ifdef POINTERS_EXTEND_UNSIGNED
4157 if (GET_MODE (offset_rtx) != Pmode)
4158 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4160 if (GET_MODE (offset_rtx) != ptr_mode)
4161 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4164 /* A constant address in TO_RTX can have VOIDmode, we must not try
4165 to call force_reg for that case. Avoid that case. */
4166 if (GET_CODE (to_rtx) == MEM
4167 && GET_MODE (to_rtx) == BLKmode
4168 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4170 && (bitpos % bitsize) == 0
4171 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4172 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4174 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4178 to_rtx = offset_address (to_rtx, offset_rtx,
4179 highest_pow2_factor_for_type (TREE_TYPE (to),
4183 if (GET_CODE (to_rtx) == MEM)
4185 /* If the field is at offset zero, we could have been given the
4186 DECL_RTX of the parent struct. Don't munge it. */
4187 to_rtx = shallow_copy_rtx (to_rtx);
4189 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4192 /* Deal with volatile and readonly fields. The former is only done
4193 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4194 if (volatilep && GET_CODE (to_rtx) == MEM)
4196 if (to_rtx == orig_to_rtx)
4197 to_rtx = copy_rtx (to_rtx);
4198 MEM_VOLATILE_P (to_rtx) = 1;
4201 if (TREE_CODE (to) == COMPONENT_REF
4202 && TREE_READONLY (TREE_OPERAND (to, 1)))
4204 if (to_rtx == orig_to_rtx)
4205 to_rtx = copy_rtx (to_rtx);
4206 RTX_UNCHANGING_P (to_rtx) = 1;
4209 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
4211 if (to_rtx == orig_to_rtx)
4212 to_rtx = copy_rtx (to_rtx);
4213 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4216 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4218 /* Spurious cast for HPUX compiler. */
4219 ? ((enum machine_mode)
4220 TYPE_MODE (TREE_TYPE (to)))
4222 unsignedp, TREE_TYPE (tem), get_alias_set (to));
4224 preserve_temp_slots (result);
4228 /* If the value is meaningful, convert RESULT to the proper mode.
4229 Otherwise, return nothing. */
4230 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
4231 TYPE_MODE (TREE_TYPE (from)),
4233 TREE_UNSIGNED (TREE_TYPE (to)))
4237 /* If the rhs is a function call and its value is not an aggregate,
4238 call the function before we start to compute the lhs.
4239 This is needed for correct code for cases such as
4240 val = setjmp (buf) on machines where reference to val
4241 requires loading up part of an address in a separate insn.
4243 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4244 since it might be a promoted variable where the zero- or sign- extension
4245 needs to be done. Handling this in the normal way is safe because no
4246 computation is done before the call. */
4247 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
4248 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4249 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4250 && GET_CODE (DECL_RTL (to)) == REG))
4255 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
4257 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4259 /* Handle calls that return values in multiple non-contiguous locations.
4260 The Irix 6 ABI has examples of this. */
4261 if (GET_CODE (to_rtx) == PARALLEL)
4262 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)));
4263 else if (GET_MODE (to_rtx) == BLKmode)
4264 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4267 #ifdef POINTERS_EXTEND_UNSIGNED
4268 if (POINTER_TYPE_P (TREE_TYPE (to))
4269 && GET_MODE (to_rtx) != GET_MODE (value))
4270 value = convert_memory_address (GET_MODE (to_rtx), value);
4272 emit_move_insn (to_rtx, value);
4274 preserve_temp_slots (to_rtx);
4277 return want_value ? to_rtx : NULL_RTX;
4280 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4281 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4284 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4286 /* Don't move directly into a return register. */
4287 if (TREE_CODE (to) == RESULT_DECL
4288 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
4293 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
4295 if (GET_CODE (to_rtx) == PARALLEL)
4296 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)));
4298 emit_move_insn (to_rtx, temp);
4300 preserve_temp_slots (to_rtx);
4303 return want_value ? to_rtx : NULL_RTX;
4306 /* In case we are returning the contents of an object which overlaps
4307 the place the value is being stored, use a safe function when copying
4308 a value through a pointer into a structure value return block. */
4309 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4310 && current_function_returns_struct
4311 && !current_function_returns_pcc_struct)
4316 size = expr_size (from);
4317 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
4319 if (TARGET_MEM_FUNCTIONS)
4320 emit_library_call (memmove_libfunc, LCT_NORMAL,
4321 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4322 XEXP (from_rtx, 0), Pmode,
4323 convert_to_mode (TYPE_MODE (sizetype),
4324 size, TREE_UNSIGNED (sizetype)),
4325 TYPE_MODE (sizetype));
4327 emit_library_call (bcopy_libfunc, LCT_NORMAL,
4328 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
4329 XEXP (to_rtx, 0), Pmode,
4330 convert_to_mode (TYPE_MODE (integer_type_node),
4332 TREE_UNSIGNED (integer_type_node)),
4333 TYPE_MODE (integer_type_node));
4335 preserve_temp_slots (to_rtx);
4338 return want_value ? to_rtx : NULL_RTX;
4341 /* Compute FROM and store the value in the rtx we got. */
4344 result = store_expr (from, to_rtx, want_value);
4345 preserve_temp_slots (result);
4348 return want_value ? result : NULL_RTX;
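/* Usage sketch: the MODIFY_EXPR case of expand_expr calls this
   routine along the lines of

     expand_assignment (lhs, rhs, want_value, 0);

   (the exact arguments are illustrative).  WANT_VALUE is nonzero only
   when the value of the assignment is itself used in a containing
   expression.  */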
4351 /* Generate code for computing expression EXP,
4352 and storing the value into TARGET.
4353 TARGET may contain a QUEUED rtx.
4355 If WANT_VALUE & 1 is nonzero, return a copy of the value
4356 not in TARGET, so that we can be sure to use the proper
4357 value in a containing expression even if TARGET has something
4358 else stored in it. If possible, we copy the value through a pseudo
4359 and return that pseudo. Or, if the value is constant, we try to
4360 return the constant. In some cases, we return a pseudo
4361 copied *from* TARGET.
4363 If the mode is BLKmode then we may return TARGET itself.
4364 It turns out that in BLKmode it doesn't cause a problem,
4365 because C has no operators that could combine two different
4366 assignments into the same BLKmode object with different values
4367 with no sequence point.  Will other languages need this to
4368 be more thorough?
4370 If WANT_VALUE & 1 is 0, we return NULL, to make sure
4371 to catch quickly any cases where the caller uses the value
4372 and fails to set WANT_VALUE.
4374 If WANT_VALUE & 2 is set, this is a store into a call param on the
4375 stack, and block moves may need to be treated specially. */
4378 store_expr (exp, target, want_value)
4384 int dont_return_target = 0;
4385 int dont_store_target = 0;
4387 if (VOID_TYPE_P (TREE_TYPE (exp)))
4389 /* C++ can generate ?: expressions with a throw expression in one
4390 branch and an rvalue in the other. Here, we resolve attempts to
4391 store the throw expression's nonexistent result. */
4394 expand_expr (exp, const0_rtx, VOIDmode, 0);
4397 if (TREE_CODE (exp) == COMPOUND_EXPR)
4399 /* Perform first part of compound expression, then assign from second
4400 part. */
4401 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4402 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4404 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4406 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4408 /* For conditional expression, get safe form of the target. Then
4409 test the condition, doing the appropriate assignment on either
4410 side. This avoids the creation of unnecessary temporaries.
4411 For non-BLKmode, it is more efficient not to do this. */
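/* For example (hypothetical source), a BLKmode assignment such as
     s = cond ? s1 : s2;
   branches on COND and performs a separate store_expr into the same
   TARGET on each arm, rather than building the chosen arm in a
   temporary and block-copying it afterward.  */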
4413 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4416 target = protect_from_queue (target, 1);
4418 do_pending_stack_adjust ();
4420 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4421 start_cleanup_deferral ();
4422 store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
4423 end_cleanup_deferral ();
4425 emit_jump_insn (gen_jump (lab2));
4428 start_cleanup_deferral ();
4429 store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
4430 end_cleanup_deferral ();
4435 return want_value & 1 ? target : NULL_RTX;
4437 else if (queued_subexp_p (target))
4438 /* If target contains a postincrement, let's not risk
4439 using it as the place to generate the rhs. */
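/* A hypothetical case: in  *p++ = f ();  the queued update of P makes
   TARGET an unsafe home for the rhs, so EXP is expanded into a fresh
   pseudo (or with no suggested target) and moved afterward.  */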
4441 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4443 /* Expand EXP into a new pseudo. */
4444 temp = gen_reg_rtx (GET_MODE (target));
4445 temp = expand_expr (exp, temp, GET_MODE (target),
4447 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4450 temp = expand_expr (exp, NULL_RTX, GET_MODE (target),
4452 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4454 /* If target is volatile, ANSI requires accessing the value
4455 *from* the target, if it is accessed. So make that happen.
4456 In no case return the target itself. */
4457 if (! MEM_VOLATILE_P (target) && (want_value & 1) != 0)
4458 dont_return_target = 1;
4460 else if ((want_value & 1) != 0
4461 && GET_CODE (target) == MEM
4462 && ! MEM_VOLATILE_P (target)
4463 && GET_MODE (target) != BLKmode)
4464 /* If target is in memory and caller wants value in a register instead,
4465 arrange that. Pass TARGET as target for expand_expr so that,
4466 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4467 We know expand_expr will not use the target in that case.
4468 Don't do this if TARGET is volatile because we are supposed
4469 to write it and then read it. */
4471 temp = expand_expr (exp, target, GET_MODE (target),
4472 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4473 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4475 /* If TEMP is already in the desired TARGET, only copy it from
4476 memory and don't store it there again. */
4477 if (temp == target
4478 || (rtx_equal_p (temp, target)
4479 && ! side_effects_p (temp) && ! side_effects_p (target)))
4480 dont_store_target = 1;
4481 temp = copy_to_reg (temp);
4483 dont_return_target = 1;
4485 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4486 /* If this is a scalar in a register that is stored in a wider mode
4487 than the declared mode, compute the result into its declared mode
4488 and then convert to the wider mode. Our value is the computed
4491 rtx inner_target = 0;
4493 /* If we don't want a value, we can do the conversion inside EXP,
4494 which will often result in some optimizations. Do the conversion
4495 in two steps: first change the signedness, if needed, then
4496 the extend. But don't do this if the type of EXP is a subtype
4497 of something else since then the conversion might involve
4498 more than just converting modes. */
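/* E.g. (hypothetical): EXP of type signed char stored into a variable
   promoted to an SImode register with unsigned promotion: EXP is
   first converted to unsigned char to fix the signedness, then to
   the SImode-sized type, so both steps remain visible to fold.  */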
4499 if ((want_value & 1) == 0
4500 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4501 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4503 if (TREE_UNSIGNED (TREE_TYPE (exp))
4504 != SUBREG_PROMOTED_UNSIGNED_P (target))
4506 ((*lang_hooks.types.signed_or_unsigned_type)
4507 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4509 exp = convert ((*lang_hooks.types.type_for_mode)
4510 (GET_MODE (SUBREG_REG (target)),
4511 SUBREG_PROMOTED_UNSIGNED_P (target)),
4514 inner_target = SUBREG_REG (target);
4517 temp = expand_expr (exp, inner_target, VOIDmode,
4518 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4520 /* If TEMP is a MEM and we want a result value, make the access
4521 now so it gets done only once. Strictly speaking, this is
4522 only necessary if the MEM is volatile, or if the address
4523 overlaps TARGET. But not performing the load twice also
4524 reduces the amount of rtl we generate and then have to CSE. */
4525 if (GET_CODE (temp) == MEM && (want_value & 1) != 0)
4526 temp = copy_to_reg (temp);
4528 /* If TEMP is a VOIDmode constant, use convert_modes to make
4529 sure that we properly convert it. */
4530 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4532 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4533 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4534 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4535 GET_MODE (target), temp,
4536 SUBREG_PROMOTED_UNSIGNED_P (target));
4539 convert_move (SUBREG_REG (target), temp,
4540 SUBREG_PROMOTED_UNSIGNED_P (target));
4542 /* If we promoted a constant, change the mode back down to match
4543 target. Otherwise, the caller might get confused by a result whose
4544 mode is larger than expected. */
4546 if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
4548 if (GET_MODE (temp) != VOIDmode)
4550 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4551 SUBREG_PROMOTED_VAR_P (temp) = 1;
4552 SUBREG_PROMOTED_UNSIGNED_SET (temp,
4553 SUBREG_PROMOTED_UNSIGNED_P (target));
4556 temp = convert_modes (GET_MODE (target),
4557 GET_MODE (SUBREG_REG (target)),
4558 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4561 return want_value & 1 ? temp : NULL_RTX;
4565 temp = expand_expr (exp, target, GET_MODE (target),
4566 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4567 /* Return TARGET if it's a specified hardware register.
4568 If TARGET is a volatile mem ref, either return TARGET
4569 or return a reg copied *from* TARGET; ANSI requires this.
4571 Otherwise, if TEMP is not TARGET, return TEMP
4572 if it is constant (for efficiency),
4573 or if we really want the correct value. */
4574 if (!(target && GET_CODE (target) == REG
4575 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4576 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4577 && ! rtx_equal_p (temp, target)
4578 && (CONSTANT_P (temp) || (want_value & 1) != 0))
4579 dont_return_target = 1;
4582 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4583 the same as that of TARGET, adjust the constant. This is needed, for
4584 example, in case it is a CONST_DOUBLE and we want only a word-sized
4585 value. */
4586 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4587 && TREE_CODE (exp) != ERROR_MARK
4588 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4589 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4590 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4592 /* If value was not generated in the target, store it there.
4593 Convert the value to TARGET's type first if necessary.
4594 If TEMP and TARGET compare equal according to rtx_equal_p, but
4595 one or both of them are volatile memory refs, we have to distinguish
4597 - expand_expr has used TARGET. In this case, we must not generate
4598 another copy. This can be detected by TARGET being equal according
4599 to ==.
4600 - expand_expr has not used TARGET - that means that the source just
4601 happens to have the same RTX form. Since temp will have been created
4602 by expand_expr, it will compare unequal according to == .
4603 We must generate a copy in this case, to reach the correct number
4604 of volatile memory references. */
4606 if ((! rtx_equal_p (temp, target)
4607 || (temp != target && (side_effects_p (temp)
4608 || side_effects_p (target))))
4609 && TREE_CODE (exp) != ERROR_MARK
4610 && ! dont_store_target
4611 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4612 but TARGET is not a valid memory reference, TEMP will differ
4613 from TARGET although it is really the same location. */
4614 && (TREE_CODE_CLASS (TREE_CODE (exp)) != 'd'
4615 || target != DECL_RTL_IF_SET (exp))
4616 /* If there's nothing to copy, don't bother. Don't call expr_size
4617 unless necessary, because some front ends' (e.g. C++) expr_size
4618 hook aborts on objects that are not supposed to be bit-copied or
4619 copied by assignment. */
4620 && expr_size (exp) != const0_rtx)
4622 target = protect_from_queue (target, 1);
4623 if (GET_MODE (temp) != GET_MODE (target)
4624 && GET_MODE (temp) != VOIDmode)
4626 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4627 if (dont_return_target)
4629 /* In this case, we will return TEMP,
4630 so make sure it has the proper mode.
4631 But don't forget to store the value into TARGET. */
4632 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4633 emit_move_insn (target, temp);
4636 convert_move (target, temp, unsignedp);
4639 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4641 /* Handle copying a string constant into an array. The string
4642 constant may be shorter than the array. So copy just the string's
4643 actual length, and clear the rest. First get the size of the data
4644 type of the string, which is actually the size of the target. */
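/* Concretely (a hypothetical initialization):
     char buf[8] = "abc";
   The STRING_CST supplies four bytes ("abc" plus the trailing null),
   so four bytes are block-moved and the remaining four are cleared
   by the code below.  */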
4645 rtx size = expr_size (exp);
4647 if (GET_CODE (size) == CONST_INT
4648 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4649 emit_block_move (target, temp, size,
4651 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4654 /* Compute the size of the data to copy from the string. */
4656 = size_binop (MIN_EXPR,
4657 make_tree (sizetype, size),
4658 size_int (TREE_STRING_LENGTH (exp)));
4660 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4662 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4665 /* Copy that much. */
4666 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4667 TREE_UNSIGNED (sizetype));
4668 emit_block_move (target, temp, copy_size_rtx,
4670 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4672 /* Figure out how much is left in TARGET that we have to clear.
4673 Do all calculations in ptr_mode. */
4674 if (GET_CODE (copy_size_rtx) == CONST_INT)
4676 size = plus_constant (size, -INTVAL (copy_size_rtx));
4677 target = adjust_address (target, BLKmode,
4678 INTVAL (copy_size_rtx));
4682 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4683 copy_size_rtx, NULL_RTX, 0,
4686 #ifdef POINTERS_EXTEND_UNSIGNED
4687 if (GET_MODE (copy_size_rtx) != Pmode)
4688 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4689 TREE_UNSIGNED (sizetype));
4692 target = offset_address (target, copy_size_rtx,
4693 highest_pow2_factor (copy_size));
4694 label = gen_label_rtx ();
4695 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4696 GET_MODE (size), 0, label);
4699 if (size != const0_rtx)
4700 clear_storage (target, size);
4706 /* Handle calls that return values in multiple non-contiguous locations.
4707 The Irix 6 ABI has examples of this. */
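/* E.g. (hypothetical ABI case) a struct of two floats coming back in
   $f0 and $f2: TARGET is then a PARALLEL of (expr_list (reg)
   (const_int byte-offset)) pairs, and emit_group_load scatters TEMP
   into them.  */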
4708 else if (GET_CODE (target) == PARALLEL)
4709 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)));
4710 else if (GET_MODE (temp) == BLKmode)
4711 emit_block_move (target, temp, expr_size (exp),
4713 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4715 emit_move_insn (target, temp);
4718 /* If we don't want a value, return NULL_RTX. */
4719 if ((want_value & 1) == 0)
4722 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4723 ??? The latter test doesn't seem to make sense. */
4724 else if (dont_return_target && GET_CODE (temp) != MEM)
4727 /* Return TARGET itself if it is a hard register. */
4728 else if ((want_value & 1) != 0
4729 && GET_MODE (target) != BLKmode
4730 && ! (GET_CODE (target) == REG
4731 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4732 return copy_to_reg (target);
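/* A minimal sketch of how a hypothetical caller combines the
   WANT_VALUE bits described above; the function name is invented for
   illustration only.  */
#if 0
static rtx
example_store_call_param (tree exp, rtx target)
{
  /* Bit 0: hand the stored value back; bit 2: this is a store into
     a call parameter on the stack, so block moves get the
     BLOCK_OP_CALL_PARM treatment.  */
  return store_expr (exp, target, 1 | 2);
}
#endif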
4738 /* Return 1 if EXP just contains zeros. */
4746 switch (TREE_CODE (exp))
4750 case NON_LVALUE_EXPR:
4751 case VIEW_CONVERT_EXPR:
4752 return is_zeros_p (TREE_OPERAND (exp, 0));
4755 return integer_zerop (exp);
4757 case COMPLEX_CST:
4758 return
4759 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4762 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4765 for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4766 elt = TREE_CHAIN (elt))
4767 if (!is_zeros_p (TREE_VALUE (elt)))
4773 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4774 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4775 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4776 if (! is_zeros_p (TREE_VALUE (elt)))
4786 /* Return 1 if EXP contains mostly (3/4) zeros. */
4789 mostly_zeros_p (exp)
4792 if (TREE_CODE (exp) == CONSTRUCTOR)
4794 int elts = 0, zeros = 0;
4795 tree elt = CONSTRUCTOR_ELTS (exp);
4796 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4798 /* If there are no ranges of true bits, it is all zero. */
4799 return elt == NULL_TREE;
4801 for (; elt; elt = TREE_CHAIN (elt))
4803 /* We do not handle the case where the index is a RANGE_EXPR,
4804 so the statistic will be somewhat inaccurate.
4805 We do make a more accurate count in store_constructor itself,
4806 and since this function is only used for nested array elements,
4807 this should be close enough. */
4808 if (mostly_zeros_p (TREE_VALUE (elt)))
4813 return 4 * zeros >= 3 * elts;
4816 return is_zeros_p (exp);
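/* Worked instance of the 3/4 threshold above (hypothetical counts):
   a constructor with ELTS == 8 elements of which ZEROS == 6 are zero
   satisfies 4 * 6 >= 3 * 8 (24 >= 24), so the whole aggregate is
   cleared first and only the nonzero elements are stored.  */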
4819 /* Helper function for store_constructor.
4820 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4821 TYPE is the type of the CONSTRUCTOR, not the element type.
4822 CLEARED is as for store_constructor.
4823 ALIAS_SET is the alias set to use for any stores.
4825 This provides a recursive shortcut back to store_constructor when it isn't
4826 necessary to go through store_field. This is so that we can pass through
4827 the cleared field to let store_constructor know that we may not have to
4828 clear a substructure if the outer structure has already been cleared. */
4831 store_constructor_field (target, bitsize, bitpos, mode, exp, type, cleared,
4834 unsigned HOST_WIDE_INT bitsize;
4835 HOST_WIDE_INT bitpos;
4836 enum machine_mode mode;
4841 if (TREE_CODE (exp) == CONSTRUCTOR
4842 && bitpos % BITS_PER_UNIT == 0
4843 /* If we have a nonzero bitpos for a register target, then we just
4844 let store_field do the bitfield handling. This is unlikely to
4845 generate unnecessary clear instructions anyway. */
4846 && (bitpos == 0 || GET_CODE (target) == MEM))
4848 if (GET_CODE (target) == MEM)
4850 = adjust_address (target,
4851 GET_MODE (target) == BLKmode
4853 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4854 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4857 /* Update the alias set, if required. */
4858 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4859 && MEM_ALIAS_SET (target) != 0)
4861 target = copy_rtx (target);
4862 set_mem_alias_set (target, alias_set);
4865 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4868 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4872 /* Store the value of constructor EXP into the rtx TARGET.
4873 TARGET is either a REG or a MEM; we know it cannot conflict, since
4874 safe_from_p has been called.
4875 CLEARED is true if TARGET is known to have been zero'd.
4876 SIZE is the number of bytes of TARGET we are allowed to modify: this
4877 may not be the same as the size of EXP if we are assigning to a field
4878 which has been packed to exclude padding bits. */
4881 store_constructor (exp, target, cleared, size)
4887 tree type = TREE_TYPE (exp);
4888 #ifdef WORD_REGISTER_OPERATIONS
4889 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4892 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4893 || TREE_CODE (type) == QUAL_UNION_TYPE)
4897 /* We either clear the aggregate or indicate the value is dead. */
4898 if ((TREE_CODE (type) == UNION_TYPE
4899 || TREE_CODE (type) == QUAL_UNION_TYPE)
4901 && ! CONSTRUCTOR_ELTS (exp))
4902 /* If the constructor is empty, clear the union. */
4904 clear_storage (target, expr_size (exp));
4908 /* If we are building a static constructor into a register,
4909 set the initial value as zero so we can fold the value into
4910 a constant. But if more than one register is involved,
4911 this probably loses. */
4912 else if (! cleared && GET_CODE (target) == REG && TREE_STATIC (exp)
4913 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4915 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4919 /* If the constructor has fewer fields than the structure
4920 or if we are initializing the structure to mostly zeros,
4921 clear the whole structure first. Don't do this if TARGET is a
4922 register whose mode size isn't equal to SIZE since clear_storage
4923 can't handle this case. */
4924 else if (! cleared && size > 0
4925 && ((list_length (CONSTRUCTOR_ELTS (exp))
4926 != fields_length (type))
4927 || mostly_zeros_p (exp))
4928 && (GET_CODE (target) != REG
4929 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4932 rtx xtarget = target;
4934 if (readonly_fields_p (type))
4936 xtarget = copy_rtx (xtarget);
4937 RTX_UNCHANGING_P (xtarget) = 1;
4940 clear_storage (xtarget, GEN_INT (size));
4945 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4947 /* Store each element of the constructor into
4948 the corresponding field of TARGET. */
4950 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4952 tree field = TREE_PURPOSE (elt);
4953 tree value = TREE_VALUE (elt);
4954 enum machine_mode mode;
4955 HOST_WIDE_INT bitsize;
4956 HOST_WIDE_INT bitpos = 0;
4958 rtx to_rtx = target;
4960 /* Just ignore missing fields.
4961 We cleared the whole structure, above,
4962 if any fields are missing. */
4966 if (cleared && is_zeros_p (value))
4969 if (host_integerp (DECL_SIZE (field), 1))
4970 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4974 mode = DECL_MODE (field);
4975 if (DECL_BIT_FIELD (field))
4978 offset = DECL_FIELD_OFFSET (field);
4979 if (host_integerp (offset, 0)
4980 && host_integerp (bit_position (field), 0))
4982 bitpos = int_bit_position (field);
4986 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4992 if (contains_placeholder_p (offset))
4993 offset = build (WITH_RECORD_EXPR, sizetype,
4994 offset, make_tree (TREE_TYPE (exp), target));
4996 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4997 if (GET_CODE (to_rtx) != MEM)
5000 #ifdef POINTERS_EXTEND_UNSIGNED
5001 if (GET_MODE (offset_rtx) != Pmode)
5002 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
5004 if (GET_MODE (offset_rtx) != ptr_mode)
5005 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
5008 to_rtx = offset_address (to_rtx, offset_rtx,
5009 highest_pow2_factor (offset));
5012 if (TREE_READONLY (field))
5014 if (GET_CODE (to_rtx) == MEM)
5015 to_rtx = copy_rtx (to_rtx);
5017 RTX_UNCHANGING_P (to_rtx) = 1;
5020 #ifdef WORD_REGISTER_OPERATIONS
5021 /* If this initializes a field that is smaller than a word, at the
5022 start of a word, try to widen it to a full word.
5023 This special case allows us to output C++ member function
5024 initializations in a form that the optimizers can understand. */
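/* E.g. (hypothetical) a 16-bit integer field at bit 0 of a struct
   held in a 32-bit register, initialized with 5: the assignment is
   widened to a full-word store of 5 -- or of (5 << 16) when
   BYTES_BIG_ENDIAN -- instead of a bit-field insertion.  */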
5025 if (GET_CODE (target) == REG
5026 && bitsize < BITS_PER_WORD
5027 && bitpos % BITS_PER_WORD == 0
5028 && GET_MODE_CLASS (mode) == MODE_INT
5029 && TREE_CODE (value) == INTEGER_CST
5031 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5033 tree type = TREE_TYPE (value);
5035 if (TYPE_PRECISION (type) < BITS_PER_WORD)
5037 type = (*lang_hooks.types.type_for_size)
5038 (BITS_PER_WORD, TREE_UNSIGNED (type));
5039 value = convert (type, value);
5042 if (BYTES_BIG_ENDIAN)
5044 = fold (build (LSHIFT_EXPR, type, value,
5045 build_int_2 (BITS_PER_WORD - bitsize, 0)));
5046 bitsize = BITS_PER_WORD;
5051 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5052 && DECL_NONADDRESSABLE_P (field))
5054 to_rtx = copy_rtx (to_rtx);
5055 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5058 store_constructor_field (to_rtx, bitsize, bitpos, mode,
5059 value, type, cleared,
5060 get_alias_set (TREE_TYPE (field)));
5063 else if (TREE_CODE (type) == ARRAY_TYPE
5064 || TREE_CODE (type) == VECTOR_TYPE)
5069 tree domain = TYPE_DOMAIN (type);
5070 tree elttype = TREE_TYPE (type);
5072 HOST_WIDE_INT minelt = 0;
5073 HOST_WIDE_INT maxelt = 0;
5075 /* Vectors are like arrays, but the domain is stored via an array
5076 type indirectly. */
5077 if (TREE_CODE (type) == VECTOR_TYPE)
5079 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
5080 the same field as TYPE_DOMAIN, we are not guaranteed that
5081 it always will. */
5082 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
5083 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
5086 const_bounds_p = (TYPE_MIN_VALUE (domain)
5087 && TYPE_MAX_VALUE (domain)
5088 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5089 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5091 /* If we have constant bounds for the range of the type, get them. */
5094 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5095 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5098 /* If the constructor has fewer elements than the array,
5099 clear the whole array first. Similarly if this is
5100 a static constructor of a non-BLKmode object. */
5101 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
5105 HOST_WIDE_INT count = 0, zero_count = 0;
5106 need_to_clear = ! const_bounds_p;
5108 /* This loop is a more accurate version of the loop in
5109 mostly_zeros_p (it handles RANGE_EXPR in an index).
5110 It is also needed to check for missing elements. */
5111 for (elt = CONSTRUCTOR_ELTS (exp);
5112 elt != NULL_TREE && ! need_to_clear;
5113 elt = TREE_CHAIN (elt))
5115 tree index = TREE_PURPOSE (elt);
5116 HOST_WIDE_INT this_node_count;
5118 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5120 tree lo_index = TREE_OPERAND (index, 0);
5121 tree hi_index = TREE_OPERAND (index, 1);
5123 if (! host_integerp (lo_index, 1)
5124 || ! host_integerp (hi_index, 1))
5130 this_node_count = (tree_low_cst (hi_index, 1)
5131 - tree_low_cst (lo_index, 1) + 1);
5134 this_node_count = 1;
5136 count += this_node_count;
5137 if (mostly_zeros_p (TREE_VALUE (elt)))
5138 zero_count += this_node_count;
5141 /* Clear the entire array first if there are any missing elements,
5142 or if the incidence of zero elements is >= 75%. */
5144 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
5148 if (need_to_clear && size > 0)
5153 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5155 clear_storage (target, GEN_INT (size));
5159 else if (REG_P (target))
5160 /* Inform later passes that the old value is dead. */
5161 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5163 /* Store each element of the constructor into
5164 the corresponding element of TARGET, determined
5165 by counting the elements. */
5166 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
5168 elt = TREE_CHAIN (elt), i++)
5170 enum machine_mode mode;
5171 HOST_WIDE_INT bitsize;
5172 HOST_WIDE_INT bitpos;
5174 tree value = TREE_VALUE (elt);
5175 tree index = TREE_PURPOSE (elt);
5176 rtx xtarget = target;
5178 if (cleared && is_zeros_p (value))
5181 unsignedp = TREE_UNSIGNED (elttype);
5182 mode = TYPE_MODE (elttype);
5183 if (mode == BLKmode)
5184 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5185 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5188 bitsize = GET_MODE_BITSIZE (mode);
5190 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5192 tree lo_index = TREE_OPERAND (index, 0);
5193 tree hi_index = TREE_OPERAND (index, 1);
5194 rtx index_r, pos_rtx, loop_end;
5195 struct nesting *loop;
5196 HOST_WIDE_INT lo, hi, count;
5199 /* If the range is constant and "small", unroll the loop. */
5201 && host_integerp (lo_index, 0)
5202 && host_integerp (hi_index, 0)
5203 && (lo = tree_low_cst (lo_index, 0),
5204 hi = tree_low_cst (hi_index, 0),
5205 count = hi - lo + 1,
5206 (GET_CODE (target) != MEM
5208 || (host_integerp (TYPE_SIZE (elttype), 1)
5209 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5212 lo -= minelt; hi -= minelt;
5213 for (; lo <= hi; lo++)
5215 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5217 if (GET_CODE (target) == MEM
5218 && !MEM_KEEP_ALIAS_SET_P (target)
5219 && TREE_CODE (type) == ARRAY_TYPE
5220 && TYPE_NONALIASED_COMPONENT (type))
5222 target = copy_rtx (target);
5223 MEM_KEEP_ALIAS_SET_P (target) = 1;
5226 store_constructor_field
5227 (target, bitsize, bitpos, mode, value, type, cleared,
5228 get_alias_set (elttype));
5233 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
5234 loop_end = gen_label_rtx ();
5236 unsignedp = TREE_UNSIGNED (domain);
5238 index = build_decl (VAR_DECL, NULL_TREE, domain);
5241 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5243 SET_DECL_RTL (index, index_r);
5244 if (TREE_CODE (value) == SAVE_EXPR
5245 && SAVE_EXPR_RTL (value) == 0)
5247 /* Make sure value gets expanded once before the
5248 loop. */
5249 expand_expr (value, const0_rtx, VOIDmode, 0);
5252 store_expr (lo_index, index_r, 0);
5253 loop = expand_start_loop (0);
5255 /* Assign value to element index. */
5257 = convert (ssizetype,
5258 fold (build (MINUS_EXPR, TREE_TYPE (index),
5259 index, TYPE_MIN_VALUE (domain))));
5260 position = size_binop (MULT_EXPR, position,
5262 TYPE_SIZE_UNIT (elttype)));
5264 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
5265 xtarget = offset_address (target, pos_rtx,
5266 highest_pow2_factor (position));
5267 xtarget = adjust_address (xtarget, mode, 0);
5268 if (TREE_CODE (value) == CONSTRUCTOR)
5269 store_constructor (value, xtarget, cleared,
5270 bitsize / BITS_PER_UNIT);
5272 store_expr (value, xtarget, 0);
5274 expand_exit_loop_if_false (loop,
5275 build (LT_EXPR, integer_type_node,
5278 expand_increment (build (PREINCREMENT_EXPR,
5280 index, integer_one_node), 0, 0);
5282 emit_label (loop_end);
5285 else if ((index != 0 && ! host_integerp (index, 0))
5286 || ! host_integerp (TYPE_SIZE (elttype), 1))
5291 index = ssize_int (1);
5294 index = convert (ssizetype,
5295 fold (build (MINUS_EXPR, index,
5296 TYPE_MIN_VALUE (domain))));
5298 position = size_binop (MULT_EXPR, index,
5300 TYPE_SIZE_UNIT (elttype)));
5301 xtarget = offset_address (target,
5302 expand_expr (position, 0, VOIDmode, 0),
5303 highest_pow2_factor (position));
5304 xtarget = adjust_address (xtarget, mode, 0);
5305 store_expr (value, xtarget, 0);
5310 bitpos = ((tree_low_cst (index, 0) - minelt)
5311 * tree_low_cst (TYPE_SIZE (elttype), 1));
5313 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5315 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
5316 && TREE_CODE (type) == ARRAY_TYPE
5317 && TYPE_NONALIASED_COMPONENT (type))
5319 target = copy_rtx (target);
5320 MEM_KEEP_ALIAS_SET_P (target) = 1;
5323 store_constructor_field (target, bitsize, bitpos, mode, value,
5324 type, cleared, get_alias_set (elttype));
5330 /* Set constructor assignments. */
5331 else if (TREE_CODE (type) == SET_TYPE)
5333 tree elt = CONSTRUCTOR_ELTS (exp);
5334 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
5335 tree domain = TYPE_DOMAIN (type);
5336 tree domain_min, domain_max, bitlength;
5338 /* The default implementation strategy is to extract the constant
5339 parts of the constructor, use that to initialize the target,
5340 and then "or" in whatever non-constant ranges we need in addition.
5342 If a large set is all zero or all ones, it is
5343 probably better to set it using memset (if available) or bzero.
5344 Also, if a large set has just a single range, it may also be
5345 better to first clear the whole set (using bzero/memset)
5346 and then set the bits we want. */
5348 /* Check for all zeros. */
5349 if (elt == NULL_TREE && size > 0)
5352 clear_storage (target, GEN_INT (size));
5356 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
5357 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
5358 bitlength = size_binop (PLUS_EXPR,
5359 size_diffop (domain_max, domain_min),
5362 nbits = tree_low_cst (bitlength, 1);
5364 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
5365 are "complicated" (more than one range), initialize (the
5366 constant parts) by copying from a constant. */
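/* For a hypothetical Pascal-style set over 0..31 whose constant part
   is {1, 2, 3}, get_set_constructor_bits marks bits 1-3 in BIT_BUFFER
   and the loop below packs them into the single word 0xe (when
   !BYTES_BIG_ENDIAN) before storing it.  */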
5367 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
5368 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
5370 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
5371 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
5372 char *bit_buffer = (char *) alloca (nbits);
5373 HOST_WIDE_INT word = 0;
5374 unsigned int bit_pos = 0;
5375 unsigned int ibit = 0;
5376 unsigned int offset = 0; /* In bytes from beginning of set. */
5378 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
5381 if (bit_buffer[ibit])
5383 if (BYTES_BIG_ENDIAN)
5384 word |= (1 << (set_word_size - 1 - bit_pos));
5386 word |= 1 << bit_pos;
5390 if (bit_pos >= set_word_size || ibit == nbits)
5392 if (word != 0 || ! cleared)
5394 rtx datum = GEN_INT (word);
5397 /* The assumption here is that it is safe to use
5398 XEXP if the set is multi-word, but not if
5399 it's single-word. */
5400 if (GET_CODE (target) == MEM)
5401 to_rtx = adjust_address (target, mode, offset);
5402 else if (offset == 0)
5406 emit_move_insn (to_rtx, datum);
5413 offset += set_word_size / BITS_PER_UNIT;
5418 /* Don't bother clearing storage if the set is all ones. */
5419 if (TREE_CHAIN (elt) != NULL_TREE
5420 || (TREE_PURPOSE (elt) == NULL_TREE
5422 : ( ! host_integerp (TREE_VALUE (elt), 0)
5423 || ! host_integerp (TREE_PURPOSE (elt), 0)
5424 || (tree_low_cst (TREE_VALUE (elt), 0)
5425 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5426 != (HOST_WIDE_INT) nbits))))
5427 clear_storage (target, expr_size (exp));
5429 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5431 /* Start of range of element or NULL. */
5432 tree startbit = TREE_PURPOSE (elt);
5433 /* End of range of element, or element value. */
5434 tree endbit = TREE_VALUE (elt);
5435 HOST_WIDE_INT startb, endb;
5436 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5438 bitlength_rtx = expand_expr (bitlength,
5439 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5441 /* Handle non-range tuple element like [ expr ]. */
5442 if (startbit == NULL_TREE)
5444 startbit = save_expr (endbit);
5448 startbit = convert (sizetype, startbit);
5449 endbit = convert (sizetype, endbit);
5450 if (! integer_zerop (domain_min))
5452 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5453 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5455 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5456 EXPAND_CONST_ADDRESS);
5457 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5458 EXPAND_CONST_ADDRESS);
5464 ((build_qualified_type ((*lang_hooks.types.type_for_mode)
5465 (GET_MODE (target), 0),
5468 emit_move_insn (targetx, target);
5471 else if (GET_CODE (target) == MEM)
5476 /* Optimization: If startbit and endbit are constants divisible
5477 by BITS_PER_UNIT, call memset instead. */
5478 if (TARGET_MEM_FUNCTIONS
5479 && TREE_CODE (startbit) == INTEGER_CST
5480 && TREE_CODE (endbit) == INTEGER_CST
5481 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5482 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5484 emit_library_call (memset_libfunc, LCT_NORMAL,
5486 plus_constant (XEXP (targetx, 0),
5487 startb / BITS_PER_UNIT),
5489 constm1_rtx, TYPE_MODE (integer_type_node),
5490 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5491 TYPE_MODE (sizetype));
5494 emit_library_call (setbits_libfunc, LCT_NORMAL,
5495 VOIDmode, 4, XEXP (targetx, 0),
5496 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5497 startbit_rtx, TYPE_MODE (sizetype),
5498 endbit_rtx, TYPE_MODE (sizetype));
5501 emit_move_insn (target, targetx);
5509 /* Store the value of EXP (an expression tree)
5510 into a subfield of TARGET which has mode MODE and occupies
5511 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5512 If MODE is VOIDmode, it means that we are storing into a bit-field.
5514 If VALUE_MODE is VOIDmode, return nothing in particular.
5515 UNSIGNEDP is not used in this case.
5517 Otherwise, return an rtx for the value stored. This rtx
5518 has mode VALUE_MODE if that is convenient to do.
5519 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5521 TYPE is the type of the underlying object,
5523 ALIAS_SET is the alias set for the destination. This value will
5524 (in general) be different from that for TARGET, since TARGET is a
5525 reference to the containing structure. */
5528 store_field (target, bitsize, bitpos, mode, exp, value_mode, unsignedp, type,
5531 HOST_WIDE_INT bitsize;
5532 HOST_WIDE_INT bitpos;
5533 enum machine_mode mode;
5535 enum machine_mode value_mode;
5540 HOST_WIDE_INT width_mask = 0;
5542 if (TREE_CODE (exp) == ERROR_MARK)
5545 /* If we have nothing to store, do nothing unless the expression has
5546 side-effects. */
5547 if (bitsize == 0)
5548 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5549 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5550 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5552 /* If we are storing into an unaligned field of an aligned union that is
5553 in a register, we may have the mode of TARGET being an integer mode but
5554 MODE == BLKmode. In that case, get an aligned object whose size and
5555 alignment are the same as TARGET and store TARGET into it (we can avoid
5556 the store if the field being stored is the entire width of TARGET). Then
5557 call ourselves recursively to store the field into a BLKmode version of
5558 that object. Finally, load from the object into TARGET. This is not
5559 very efficient in general, but should only be slightly more expensive
5560 than the otherwise-required unaligned accesses. Perhaps this can be
5561 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5562 twice, once with emit_move_insn and once via store_field. */
5565 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5567 rtx object = assign_temp (type, 0, 1, 1);
5568 rtx blk_object = adjust_address (object, BLKmode, 0);
5570 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5571 emit_move_insn (object, target);
5573 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5576 emit_move_insn (target, object);
5578 /* We want to return the BLKmode version of the data. */
5582 if (GET_CODE (target) == CONCAT)
5584 /* We're storing into a struct containing a single __complex. */
5588 return store_expr (exp, target, 0);
5591 /* If the structure is in a register or if the component
5592 is a bit field, we cannot use addressing to access it.
5593 Use bit-field techniques or SUBREG to store in it. */
5595 if (mode == VOIDmode
5596 || (mode != BLKmode && ! direct_store[(int) mode]
5597 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5598 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5599 || GET_CODE (target) == REG
5600 || GET_CODE (target) == SUBREG
5601 /* If the field isn't aligned enough to store as an ordinary memref,
5602 store it as a bit field. */
5604 && ((SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
5605 && (MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode)))
5606 || bitpos % GET_MODE_ALIGNMENT (mode)))
5607 /* If the RHS and field are a constant size and the size of the
5608 RHS isn't the same size as the bitfield, we must use bitfield
5609 operations. */
5610 || (bitsize >= 0
5611 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5612 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5614 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5616 /* If BITSIZE is narrower than the size of the type of EXP
5617 we will be narrowing TEMP. Normally, what's wanted are the
5618 low-order bits. However, if EXP's type is a record and this is
5619 a big-endian machine, we want the upper BITSIZE bits. */
5620 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5621 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5622 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5623 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5624 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5628 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5629 MODE. */
5630 if (mode != VOIDmode && mode != BLKmode
5631 && mode != TYPE_MODE (TREE_TYPE (exp)))
5632 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5634 /* If the modes of TARGET and TEMP are both BLKmode, both
5635 must be in memory and BITPOS must be aligned on a byte
5636 boundary. If so, we simply do a block copy. */
5637 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5639 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5640 || bitpos % BITS_PER_UNIT != 0)
5643 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5644 emit_block_move (target, temp,
5645 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5649 return value_mode == VOIDmode ? const0_rtx : target;
5652 /* Store the value in the bitfield. */
5653 store_bit_field (target, bitsize, bitpos, mode, temp,
5654 int_size_in_bytes (type));
5656 if (value_mode != VOIDmode)
5658 /* The caller wants an rtx for the value.
5659 If possible, avoid refetching from the bitfield itself. */
5660 if (width_mask != 0
5661 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5664 enum machine_mode tmode;
5666 tmode = GET_MODE (temp);
5667 if (tmode == VOIDmode)
5671 return expand_and (tmode, temp,
5672 gen_int_mode (width_mask, tmode),
5675 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5676 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5677 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5680 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5681 NULL_RTX, value_mode, VOIDmode,
5682 int_size_in_bytes (type));
5688 rtx addr = XEXP (target, 0);
5689 rtx to_rtx = target;
5691 /* If a value is wanted, it must be the lhs;
5692 so make the address stable for multiple use. */
5694 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5695 && ! CONSTANT_ADDRESS_P (addr)
5696 /* A frame-pointer reference is already stable. */
5697 && ! (GET_CODE (addr) == PLUS
5698 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5699 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5700 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5701 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5703 /* Now build a reference to just the desired component. */
5705 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5707 if (to_rtx == target)
5708 to_rtx = copy_rtx (to_rtx);
5710 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5711 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5712 set_mem_alias_set (to_rtx, alias_set);
5714 return store_expr (exp, to_rtx, value_mode != VOIDmode);
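/* A hypothetical invocation: storing EXP into a 5-bit field that
   begins 3 bits into TARGET, with no value wanted back, would look
   like
     store_field (target, 5, 3, VOIDmode, exp, VOIDmode, 0, type, set);
   where MODE == VOIDmode marks a bit-field store and selects the
   store_bit_field path above.  */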
5718 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5719 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5720 codes and find the ultimate containing object, which we return.
5722 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5723 bit position, and *PUNSIGNEDP to the signedness of the field.
5724 If the position of the field is variable, we store a tree
5725 giving the variable offset (in units) in *POFFSET.
5726 This offset is in addition to the bit position.
5727 If the position is not variable, we store 0 in *POFFSET.
5729 If any of the extraction expressions is volatile,
5730 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5732 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5733 is a mode that can be used to access the field. In that case, *PBITSIZE
5736 If the field describes a variable-sized object, *PMODE is set to
5737 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5738 this case, but the address of the object can be found. */
5741 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5742 punsignedp, pvolatilep)
5744 HOST_WIDE_INT *pbitsize;
5745 HOST_WIDE_INT *pbitpos;
5747 enum machine_mode *pmode;
5752 enum machine_mode mode = VOIDmode;
5753 tree offset = size_zero_node;
5754 tree bit_offset = bitsize_zero_node;
5755 tree placeholder_ptr = 0;
5758 /* First get the mode, signedness, and size. We do this from just the
5759 outermost expression. */
5760 if (TREE_CODE (exp) == COMPONENT_REF)
5762 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5763 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5764 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5766 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5768 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5770 size_tree = TREE_OPERAND (exp, 1);
5771 *punsignedp = TREE_UNSIGNED (exp);
5775 mode = TYPE_MODE (TREE_TYPE (exp));
5776 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5778 if (mode == BLKmode)
5779 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5781 *pbitsize = GET_MODE_BITSIZE (mode);
5786 if (! host_integerp (size_tree, 1))
5787 mode = BLKmode, *pbitsize = -1;
5789 *pbitsize = tree_low_cst (size_tree, 1);
5792 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5793 and find the ultimate containing object. */
5796 if (TREE_CODE (exp) == BIT_FIELD_REF)
5797 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5798 else if (TREE_CODE (exp) == COMPONENT_REF)
5800 tree field = TREE_OPERAND (exp, 1);
5801 tree this_offset = DECL_FIELD_OFFSET (field);
5803 /* If this field hasn't been filled in yet, don't go
5804 past it. This should only happen when folding expressions
5805 made during type construction. */
5806 if (this_offset == 0)
5808 else if (! TREE_CONSTANT (this_offset)
5809 && contains_placeholder_p (this_offset))
5810 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5812 offset = size_binop (PLUS_EXPR, offset, this_offset);
5813 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5814 DECL_FIELD_BIT_OFFSET (field));
5816 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5819 else if (TREE_CODE (exp) == ARRAY_REF
5820 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5822 tree index = TREE_OPERAND (exp, 1);
5823 tree array = TREE_OPERAND (exp, 0);
5824 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5825 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5826 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5828 /* We assume all arrays have sizes that are a multiple of a byte.
5829 First subtract the lower bound, if any, in the type of the
5830 index, then convert to sizetype and multiply by the size of the
5831 element. */
5832 if (low_bound != 0 && ! integer_zerop (low_bound))
5833 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5836 /* If the index has a self-referential type, pass it to a
5837 WITH_RECORD_EXPR; if the component size is self-referential, pass our
5838 component to one. */
5839 if (! TREE_CONSTANT (index)
5840 && contains_placeholder_p (index))
5841 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5842 if (! TREE_CONSTANT (unit_size)
5843 && contains_placeholder_p (unit_size))
5844 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5846 offset = size_binop (PLUS_EXPR, offset,
5847 size_binop (MULT_EXPR,
5848 convert (sizetype, index),
5852 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5854 tree new = find_placeholder (exp, &placeholder_ptr);
5856 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5857 We might have been called from tree optimization where we
5858 haven't set up an object yet. */
5866 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5867 && TREE_CODE (exp) != VIEW_CONVERT_EXPR
5868 && ! ((TREE_CODE (exp) == NOP_EXPR
5869 || TREE_CODE (exp) == CONVERT_EXPR)
5870 && (TYPE_MODE (TREE_TYPE (exp))
5871 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5874 /* If any reference in the chain is volatile, the effect is volatile. */
5875 if (TREE_THIS_VOLATILE (exp))
5878 exp = TREE_OPERAND (exp, 0);
5881 /* If OFFSET is constant, see if we can return the whole thing as a
5882 constant bit position. Otherwise, split it up. */
5883 if (host_integerp (offset, 0)
5884 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5886 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5887 && host_integerp (tem, 0))
5888 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5890 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
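/* A worked (hypothetical) decomposition: for the reference  s.a[i].f
   the object returned is S; the variable part i * sizeof (s.a[0])
   comes back in *POFFSET, the constant bit displacement of A and F
   lands in *PBITPOS, and *PBITSIZE/*PMODE describe field F itself
   (*PMODE is VOIDmode if F is a bit-field).  */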
5896 /* Return 1 if T is an expression that get_inner_reference handles. */
5899 handled_component_p (t)
5902 switch (TREE_CODE (t))
5907 case ARRAY_RANGE_REF:
5908 case NON_LVALUE_EXPR:
5909 case VIEW_CONVERT_EXPR:
5914 return (TYPE_MODE (TREE_TYPE (t))
5915 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5922 /* Given an rtx VALUE that may contain additions and multiplications, return
5923 an equivalent value that just refers to a register, memory, or constant.
5924 This is done by generating instructions to perform the arithmetic and
5925 returning a pseudo-register containing the value.
5927 The returned value may be a REG, SUBREG, MEM or constant. */
5930 force_operand (value, target)
5934 /* Use subtarget as the target for operand 0 of a binary operation. */
5935 rtx subtarget = get_subtarget (target);
5936 enum rtx_code code = GET_CODE (value);
5938 /* Check for a PIC address load. */
5939 if ((code == PLUS || code == MINUS)
5940 && XEXP (value, 0) == pic_offset_table_rtx
5941 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5942 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5943 || GET_CODE (XEXP (value, 1)) == CONST))
5946 subtarget = gen_reg_rtx (GET_MODE (value));
5947 emit_move_insn (subtarget, value);
5951 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5954 target = gen_reg_rtx (GET_MODE (value));
5955 convert_move (target, force_operand (XEXP (value, 0), NULL),
5956 code == ZERO_EXTEND);
5960 if (GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
5962 op2 = XEXP (value, 1);
5963 if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
5965 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5968 op2 = negate_rtx (GET_MODE (value), op2);
5971 /* Check for an addition with OP2 a constant integer and our first
5972 operand a PLUS of a virtual register and something else. In that
5973 case, we want to emit the sum of the virtual register and the
5974 constant first and then add the other value. This allows virtual
5975 register instantiation to simply modify the constant rather than
5976 creating another one around this addition. */
5977 if (code == PLUS && GET_CODE (op2) == CONST_INT
5978 && GET_CODE (XEXP (value, 0)) == PLUS
5979 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5980 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5981 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5983 rtx temp = expand_simple_binop (GET_MODE (value), code,
5984 XEXP (XEXP (value, 0), 0), op2,
5985 subtarget, 0, OPTAB_LIB_WIDEN);
5986 return expand_simple_binop (GET_MODE (value), code, temp,
5987 force_operand (XEXP (XEXP (value,
5989 target, 0, OPTAB_LIB_WIDEN);
5992 op1 = force_operand (XEXP (value, 0), subtarget);
5993 op2 = force_operand (op2, NULL_RTX);
5997 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5999 if (!INTEGRAL_MODE_P (GET_MODE (value)))
6000 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6001 target, 1, OPTAB_LIB_WIDEN);
6003 return expand_divmod (0,
6004 FLOAT_MODE_P (GET_MODE (value))
6005 ? RDIV_EXPR : TRUNC_DIV_EXPR,
6006 GET_MODE (value), op1, op2, target, 0);
6009 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6013 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
6017 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6021 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6022 target, 0, OPTAB_LIB_WIDEN);
6025 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6026 target, 1, OPTAB_LIB_WIDEN);
6029 if (GET_RTX_CLASS (code) == '1')
6031 op1 = force_operand (XEXP (value, 0), NULL_RTX);
6032 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
6035 #ifdef INSN_SCHEDULING
6036 /* On machines that have insn scheduling, we want all memory references to be
6037 explicit, so we need to deal with such paradoxical SUBREGs. */
6038 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
6039 && (GET_MODE_SIZE (GET_MODE (value))
6040 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
6042 = simplify_gen_subreg (GET_MODE (value),
6043 force_reg (GET_MODE (SUBREG_REG (value)),
6044 force_operand (SUBREG_REG (value),
6046 GET_MODE (SUBREG_REG (value)),
6047 SUBREG_BYTE (value));
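/* Usage sketch (hypothetical operands): handed
     (plus (mult (reg 60) (const_int 4)) (reg 61))
   force_operand emits the multiply and the add through the expanders
   above and returns a pseudo holding the sum, so callers can feed
   arbitrary address arithmetic to insns wanting a plain operand.  */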
6053 /* Subroutine of expand_expr: return nonzero iff there is no way that
6054 EXP can reference X, which is being modified. TOP_P is nonzero if this
6055 call is going to be used to determine whether we need a temporary
6056 for EXP, as opposed to a recursive call to this function.
6058 It is always safe for this routine to return zero since it merely
6059 searches for optimization opportunities. */
6062 safe_from_p (x, exp, top_p)
6069 static tree save_expr_list;
6072 /* If EXP has varying size, we MUST use a target since we currently
6073 have no way of allocating temporaries of variable size
6074 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6075 So we assume here that something at a higher level has prevented a
6076 clash. This is somewhat bogus, but the best we can do. Only
6077 do this when X is BLKmode and when we are at the top level. */
6078 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6079 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
6080 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
6081 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
6082 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
6084 && GET_MODE (x) == BLKmode)
6085 /* If X is in the outgoing argument area, it is always safe. */
6086 || (GET_CODE (x) == MEM
6087 && (XEXP (x, 0) == virtual_outgoing_args_rtx
6088 || (GET_CODE (XEXP (x, 0)) == PLUS
6089 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
6092 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
6093 find the underlying pseudo. */
6094 if (GET_CODE (x) == SUBREG)
6097 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6101 /* A SAVE_EXPR might appear many times in the expression passed to the
6102 top-level safe_from_p call, and if it has a complex subexpression,
6103 examining it multiple times could result in a combinatorial explosion.
6104 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
6105 with optimization took about 28 minutes to compile -- even though it was
6106 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
6107 and turn that off when we are done. We keep a list of the SAVE_EXPRs
6108 we have processed. Note that the only test of top_p was above. */
6117 rtn = safe_from_p (x, exp, 0);
6119 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
6120 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
6125 /* Now look at our tree code and possibly recurse. */
6126 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6129 exp_rtl = DECL_RTL_IF_SET (exp);
6136 if (TREE_CODE (exp) == TREE_LIST)
6140 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
6142 exp = TREE_CHAIN (exp);
6145 if (TREE_CODE (exp) != TREE_LIST)
6146 return safe_from_p (x, exp, 0);
6149 else if (TREE_CODE (exp) == ERROR_MARK)
6150 return 1; /* An already-visited SAVE_EXPR? */
6156 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6161 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6165 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6166 the expression. If it is set, we conflict iff we are that rtx or
6167 both are in memory. Otherwise, we check all operands of the
6168 expression recursively. */
6170 switch (TREE_CODE (exp))
6173 /* If the operand is static or we are static, we can't conflict.
6174 Likewise if we don't conflict with the operand at all. */
6175 if (staticp (TREE_OPERAND (exp, 0))
6176 || TREE_STATIC (exp)
6177 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6180 /* Otherwise, the only way this can conflict is if we are taking
6181 the address of a DECL and that address is part of X, which is
6182 very rare. */
6183 exp = TREE_OPERAND (exp, 0);
6186 if (!DECL_RTL_SET_P (exp)
6187 || GET_CODE (DECL_RTL (exp)) != MEM)
6190 exp_rtl = XEXP (DECL_RTL (exp), 0);
6195 if (GET_CODE (x) == MEM
6196 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6197 get_alias_set (exp)))
6202 /* Assume that the call will clobber all hard registers and
6203 all of memory. */
6204 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6205 || GET_CODE (x) == MEM)
6210 /* If a sequence exists, we would have to scan every instruction
6211 in the sequence to see if it was safe. This is probably not
6212 worthwhile. */
6213 if (RTL_EXPR_SEQUENCE (exp))
6216 exp_rtl = RTL_EXPR_RTL (exp);
6219 case WITH_CLEANUP_EXPR:
6220 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
6223 case CLEANUP_POINT_EXPR:
6224 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6227 exp_rtl = SAVE_EXPR_RTL (exp);
6231 /* If we've already scanned this, don't do it again. Otherwise,
6232 show we've scanned it and record for clearing the flag if we're
6233 going on. */
6234 if (TREE_PRIVATE (exp))
6237 TREE_PRIVATE (exp) = 1;
6238 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6240 TREE_PRIVATE (exp) = 0;
6244 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
6248 /* The only operand we look at is operand 1. The rest aren't
6249 part of the expression. */
6250 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
6252 case METHOD_CALL_EXPR:
6253 /* This takes an rtx argument, but shouldn't appear here. */
6260 /* If we have an rtx, we do not need to scan our operands. */
6264 nops = first_rtl_op (TREE_CODE (exp));
6265 for (i = 0; i < nops; i++)
6266 if (TREE_OPERAND (exp, i) != 0
6267 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6270 /* If this is a language-specific tree code, it may require
6271 special handling. */
6272 if ((unsigned int) TREE_CODE (exp)
6273 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
6274 && !(*lang_hooks.safe_from_p) (x, exp))
6278 /* If we have an rtl, find any enclosed object. Then see if we conflict
6279 with it. */
6282 if (GET_CODE (exp_rtl) == SUBREG)
6284 exp_rtl = SUBREG_REG (exp_rtl);
6285 if (GET_CODE (exp_rtl) == REG
6286 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6290 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6291 are memory and they conflict. */
6292 return ! (rtx_equal_p (x, exp_rtl)
6293 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
6294 && true_dependence (exp_rtl, VOIDmode, x,
6295 rtx_addr_varies_p)));
6298 /* If we reach here, it is safe. */
6302 /* Subroutine of expand_expr: return rtx if EXP is a
6303 variable or parameter; else return 0. */
6310 switch (TREE_CODE (exp))
6314 return DECL_RTL (exp);
6320 #ifdef MAX_INTEGER_COMPUTATION_MODE
6323 check_max_integer_computation_mode (exp)
6326 enum tree_code code;
6327 enum machine_mode mode;
6329 /* Strip any NOPs that don't change the mode. */
6331 code = TREE_CODE (exp);
6333 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
6334 if (code == NOP_EXPR
6335 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
6338 /* First check the type of the overall operation. We need only look at
6339 unary, binary and relational operations. */
6340 if (TREE_CODE_CLASS (code) == '1'
6341 || TREE_CODE_CLASS (code) == '2'
6342 || TREE_CODE_CLASS (code) == '<')
6344 mode = TYPE_MODE (TREE_TYPE (exp));
6345 if (GET_MODE_CLASS (mode) == MODE_INT
6346 && mode > MAX_INTEGER_COMPUTATION_MODE)
6347 internal_error ("unsupported wide integer operation");
6350 /* Check operand of a unary op. */
6351 if (TREE_CODE_CLASS (code) == '1')
6353 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6354 if (GET_MODE_CLASS (mode) == MODE_INT
6355 && mode > MAX_INTEGER_COMPUTATION_MODE)
6356 internal_error ("unsupported wide integer operation");
6359 /* Check operands of a binary/comparison op. */
6360 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
6362 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6363 if (GET_MODE_CLASS (mode) == MODE_INT
6364 && mode > MAX_INTEGER_COMPUTATION_MODE)
6365 internal_error ("unsupported wide integer operation");
6367 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
6368 if (GET_MODE_CLASS (mode) == MODE_INT
6369 && mode > MAX_INTEGER_COMPUTATION_MODE)
6370 internal_error ("unsupported wide integer operation");
6375 /* Return the highest power of two that EXP is known to be a multiple of.
6376 This is used in updating alignment of MEMs in array references. */
6378 static unsigned HOST_WIDE_INT
6379 highest_pow2_factor (exp)
6382 unsigned HOST_WIDE_INT c0, c1;
6384 switch (TREE_CODE (exp))
6387 /* We can find the lowest bit that's a one. If the low
6388 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6389 We need to handle this case since we can find it in a COND_EXPR,
a MIN_EXPR, or a MAX_EXPR.  If the constant overflows, we have an
erroneous program, so return BIGGEST_ALIGNMENT to avoid any
later ICE.  */
6393 if (TREE_CONSTANT_OVERFLOW (exp))
6394 return BIGGEST_ALIGNMENT;
/* Note: tree_low_cst is intentionally not used here;
6398 we don't care about the upper bits. */
6399 c0 = TREE_INT_CST_LOW (exp);
6401 return c0 ? c0 : BIGGEST_ALIGNMENT;
6405 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6406 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6407 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6408 return MIN (c0, c1);
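/* E.g., if one term is a known multiple of 8 and the other a known
multiple of 4, the sum or difference is only guaranteed to be a
multiple of MIN (8, 4) = 4.  */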
6411 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6412 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6415 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6417 if (integer_pow2p (TREE_OPERAND (exp, 1))
6418 && host_integerp (TREE_OPERAND (exp, 1), 1))
6420 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6421 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6422 return MAX (1, c0 / c1);
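/* E.g., a value known to be a multiple of 16 divided by the constant 4
is still known to be a multiple of 16/4 = 4; the MAX keeps the result
from dropping below 1.  */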
6426 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6427 case SAVE_EXPR: case WITH_RECORD_EXPR:
6428 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6431 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6434 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6435 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6436 return MIN (c0, c1);
6445 /* Similar, except that it is known that the expression must be a multiple
6446 of the alignment of TYPE. */
6448 static unsigned HOST_WIDE_INT
6449 highest_pow2_factor_for_type (type, exp)
6453 unsigned HOST_WIDE_INT type_align, factor;
6455 factor = highest_pow2_factor (exp);
6456 type_align = TYPE_ALIGN (type) / BITS_PER_UNIT;
6457 return MAX (factor, type_align);
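/* E.g., even if nothing is known about the value itself (factor 1),
an expression of a type aligned to 8 bytes yields MAX (1, 8) = 8.  */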
6460 /* Return an object on the placeholder list that matches EXP, a
6461 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
6462 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
6463 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
6464 is a location which initially points to a starting location in the
6465 placeholder list (zero means start of the list) and where a pointer into
6466 the placeholder list at which the object is found is placed. */
6469 find_placeholder (exp, plist)
6473 tree type = TREE_TYPE (exp);
6474 tree placeholder_expr;
6476 for (placeholder_expr
6477 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
6478 placeholder_expr != 0;
6479 placeholder_expr = TREE_CHAIN (placeholder_expr))
6481 tree need_type = TYPE_MAIN_VARIANT (type);
6484 /* Find the outermost reference that is of the type we want. If none,
see if any object has a type that is a pointer to the type we
want.  */
6487 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6488 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6489 || TREE_CODE (elt) == COND_EXPR)
6490 ? TREE_OPERAND (elt, 1)
6491 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6492 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6493 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6494 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6495 ? TREE_OPERAND (elt, 0) : 0))
6496 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6499 *plist = placeholder_expr;
6503 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6505 = ((TREE_CODE (elt) == COMPOUND_EXPR
6506 || TREE_CODE (elt) == COND_EXPR)
6507 ? TREE_OPERAND (elt, 1)
6508 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6509 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6510 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6511 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6512 ? TREE_OPERAND (elt, 0) : 0))
6513 if (POINTER_TYPE_P (TREE_TYPE (elt))
6514 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6518 *plist = placeholder_expr;
6519 return build1 (INDIRECT_REF, need_type, elt);
6526 /* expand_expr: generate code for computing expression EXP.
6527 An rtx for the computed value is returned. The value is never null.
6528 In the case of a void EXP, const0_rtx is returned.
6530 The value may be stored in TARGET if TARGET is nonzero.
6531 TARGET is just a suggestion; callers must assume that
6532 the rtx returned may not be the same as TARGET.
6534 If TARGET is CONST0_RTX, it means that the value will be ignored.
6536 If TMODE is not VOIDmode, it suggests generating the
6537 result in mode TMODE. But this is done only when convenient.
Otherwise, TMODE is ignored and the value is generated in its natural mode.
6539 TMODE is just a suggestion; callers must assume that
6540 the rtx returned may not have mode TMODE.
6542 Note that TARGET may have neither TMODE nor MODE. In that case, it
6543 probably will not be used.
6545 If MODIFIER is EXPAND_SUM then when EXP is an addition
6546 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6547 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6548 products as above, or REG or MEM, or constant.
6549 Ordinarily in such cases we would output mul or add instructions
6550 and then return a pseudo reg containing the sum.
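For example, under EXPAND_SUM the expression P + I*4 may come back as
(PLUS (MULT (REG I) (CONST_INT 4)) (REG P)), which the caller can use
directly as a memory address.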
6552 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6553 it also marks a label as absolutely required (it can't be dead).
6554 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6555 This is used for outputting expressions used in initializers.
6557 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6558 with a constant address even if that address is not normally legitimate.
6559 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6561 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6562 a call parameter. Such targets require special care as we haven't yet
6563 marked TARGET so that it's safe from being trashed by libcalls. We
6564 don't want to use TARGET for anything but the final result;
intermediate values must go elsewhere.  Additionally, calls to
6566 emit_block_move will be flagged with BLOCK_OP_CALL_PARM. */
6569 expand_expr (exp, target, tmode, modifier)
6572 enum machine_mode tmode;
6573 enum expand_modifier modifier;
6576 tree type = TREE_TYPE (exp);
6577 int unsignedp = TREE_UNSIGNED (type);
6578 enum machine_mode mode;
6579 enum tree_code code = TREE_CODE (exp);
6581 rtx subtarget, original_target;
6585 /* Handle ERROR_MARK before anybody tries to access its type. */
6586 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6588 op0 = CONST0_RTX (tmode);
6594 mode = TYPE_MODE (type);
6595 /* Use subtarget as the target for operand 0 of a binary operation. */
6596 subtarget = get_subtarget (target);
6597 original_target = target;
6598 ignore = (target == const0_rtx
6599 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6600 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6601 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6602 && TREE_CODE (type) == VOID_TYPE));
6604 /* If we are going to ignore this result, we need only do something
6605 if there is a side-effect somewhere in the expression. If there
6606 is, short-circuit the most common cases here. Note that we must
6607 not call expand_expr with anything but const0_rtx in case this
6608 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6612 if (! TREE_SIDE_EFFECTS (exp))
6615 /* Ensure we reference a volatile object even if value is ignored, but
6616 don't do this if all we are doing is taking its address. */
6617 if (TREE_THIS_VOLATILE (exp)
6618 && TREE_CODE (exp) != FUNCTION_DECL
6619 && mode != VOIDmode && mode != BLKmode
6620 && modifier != EXPAND_CONST_ADDRESS)
6622 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6623 if (GET_CODE (temp) == MEM)
6624 temp = copy_to_reg (temp);
6628 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6629 || code == INDIRECT_REF || code == BUFFER_REF)
6630 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6633 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6634 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6636 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6637 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6640 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6641 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
/* If the second operand has no side effects, just evaluate
the first.  */
6644 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6646 else if (code == BIT_FIELD_REF)
6648 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6649 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6650 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6657 #ifdef MAX_INTEGER_COMPUTATION_MODE
6658 /* Only check stuff here if the mode we want is different from the mode
6659 of the expression; if it's the same, check_max_integer_computation_mode
6660 will handle it. Do we really need to check this stuff at all? */
if (target
&& GET_MODE (target) != mode
6664 && TREE_CODE (exp) != INTEGER_CST
6665 && TREE_CODE (exp) != PARM_DECL
6666 && TREE_CODE (exp) != ARRAY_REF
6667 && TREE_CODE (exp) != ARRAY_RANGE_REF
6668 && TREE_CODE (exp) != COMPONENT_REF
6669 && TREE_CODE (exp) != BIT_FIELD_REF
6670 && TREE_CODE (exp) != INDIRECT_REF
6671 && TREE_CODE (exp) != CALL_EXPR
6672 && TREE_CODE (exp) != VAR_DECL
6673 && TREE_CODE (exp) != RTL_EXPR)
6675 enum machine_mode mode = GET_MODE (target);
6677 if (GET_MODE_CLASS (mode) == MODE_INT
6678 && mode > MAX_INTEGER_COMPUTATION_MODE)
6679 internal_error ("unsupported wide integer operation");
if (tmode != mode
&& TREE_CODE (exp) != INTEGER_CST
6684 && TREE_CODE (exp) != PARM_DECL
6685 && TREE_CODE (exp) != ARRAY_REF
6686 && TREE_CODE (exp) != ARRAY_RANGE_REF
6687 && TREE_CODE (exp) != COMPONENT_REF
6688 && TREE_CODE (exp) != BIT_FIELD_REF
6689 && TREE_CODE (exp) != INDIRECT_REF
6690 && TREE_CODE (exp) != VAR_DECL
6691 && TREE_CODE (exp) != CALL_EXPR
6692 && TREE_CODE (exp) != RTL_EXPR
6693 && GET_MODE_CLASS (tmode) == MODE_INT
6694 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6695 internal_error ("unsupported wide integer operation");
6697 check_max_integer_computation_mode (exp);
/* If we will do cse, generate all results into pseudo registers
6701 since 1) that allows cse to find more things
6702 and 2) otherwise cse could produce an insn the machine
6703 cannot support. An exception is a CONSTRUCTOR into a multi-word
6704 MEM: that's much more likely to be most efficient into the MEM.
6705 Another is a CALL_EXPR which must return in memory. */
6707 if (! cse_not_expected && mode != BLKmode && target
6708 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6709 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
6710 && ! (code == CALL_EXPR && aggregate_value_p (exp)))
6717 tree function = decl_function_context (exp);
6718 /* Handle using a label in a containing function. */
6719 if (function != current_function_decl
6720 && function != inline_function_decl && function != 0)
6722 struct function *p = find_function_data (function);
6723 p->expr->x_forced_labels
6724 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6725 p->expr->x_forced_labels);
6729 if (modifier == EXPAND_INITIALIZER)
6730 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6735 temp = gen_rtx_MEM (FUNCTION_MODE,
6736 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6737 if (function != current_function_decl
6738 && function != inline_function_decl && function != 0)
6739 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6744 if (!DECL_RTL_SET_P (exp))
6746 error_with_decl (exp, "prior parameter's size depends on `%s'");
6747 return CONST0_RTX (mode);
6750 /* ... fall through ... */
6753 /* If a static var's type was incomplete when the decl was written,
6754 but the type is complete now, lay out the decl now. */
6755 if (DECL_SIZE (exp) == 0
6756 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6757 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6758 layout_decl (exp, 0);
6760 /* ... fall through ... */
6764 if (DECL_RTL (exp) == 0)
6767 /* Ensure variable marked as used even if it doesn't go through
a parser.  If it hasn't been used yet, write out an external
definition.  */
6770 if (! TREE_USED (exp))
6772 assemble_external (exp);
6773 TREE_USED (exp) = 1;
6776 /* Show we haven't gotten RTL for this yet. */
6779 /* Handle variables inherited from containing functions. */
6780 context = decl_function_context (exp);
6782 /* We treat inline_function_decl as an alias for the current function
6783 because that is the inline function whose vars, types, etc.
6784 are being merged into the current function.
6785 See expand_inline_function. */
6787 if (context != 0 && context != current_function_decl
6788 && context != inline_function_decl
6789 /* If var is static, we don't need a static chain to access it. */
6790 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6791 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6795 /* Mark as non-local and addressable. */
6796 DECL_NONLOCAL (exp) = 1;
6797 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6799 (*lang_hooks.mark_addressable) (exp);
6800 if (GET_CODE (DECL_RTL (exp)) != MEM)
6802 addr = XEXP (DECL_RTL (exp), 0);
6803 if (GET_CODE (addr) == MEM)
6805 = replace_equiv_address (addr,
6806 fix_lexical_addr (XEXP (addr, 0), exp));
6808 addr = fix_lexical_addr (addr, exp);
6810 temp = replace_equiv_address (DECL_RTL (exp), addr);
6813 /* This is the case of an array whose size is to be determined
6814 from its initializer, while the initializer is still being parsed.
6817 else if (GET_CODE (DECL_RTL (exp)) == MEM
6818 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6819 temp = validize_mem (DECL_RTL (exp));
6821 /* If DECL_RTL is memory, we are in the normal case and either
6822 the address is not valid or it is not a register and -fforce-addr
6823 is specified, get the address into a register. */
6825 else if (GET_CODE (DECL_RTL (exp)) == MEM
6826 && modifier != EXPAND_CONST_ADDRESS
6827 && modifier != EXPAND_SUM
6828 && modifier != EXPAND_INITIALIZER
6829 && (! memory_address_p (DECL_MODE (exp),
6830 XEXP (DECL_RTL (exp), 0))
6832 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6833 temp = replace_equiv_address (DECL_RTL (exp),
6834 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6836 /* If we got something, return it. But first, set the alignment
6837 if the address is a register. */
6840 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6841 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6846 /* If the mode of DECL_RTL does not match that of the decl, it
6847 must be a promoted value. We return a SUBREG of the wanted mode,
6848 but mark it so that we know that it was already extended. */
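/* E.g., on a target whose PROMOTE_MODE widens QImode locals to SImode,
DECL_RTL is an SImode REG and we return a QImode lowpart SUBREG of it
with SUBREG_PROMOTED_VAR_P set.  */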
6850 if (GET_CODE (DECL_RTL (exp)) == REG
6851 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6853 /* Get the signedness used for this variable. Ensure we get the
6854 same mode we got when the variable was declared. */
6855 if (GET_MODE (DECL_RTL (exp))
6856 != promote_mode (type, DECL_MODE (exp), &unsignedp,
6857 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6860 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6861 SUBREG_PROMOTED_VAR_P (temp) = 1;
6862 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6866 return DECL_RTL (exp);
6869 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6870 TREE_INT_CST_HIGH (exp), mode);
6872 /* ??? If overflow is set, fold will have done an incomplete job,
6873 which can result in (plus xx (const_int 0)), which can get
6874 simplified by validate_replace_rtx during virtual register
6875 instantiation, which can result in unrecognizable insns.
6876 Avoid this by forcing all overflows into registers. */
6877 if (TREE_CONSTANT_OVERFLOW (exp)
6878 && modifier != EXPAND_INITIALIZER)
6879 temp = force_reg (mode, temp);
6884 return const_vector_from_tree (exp);
6887 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6890 /* If optimized, generate immediate CONST_DOUBLE
6891 which will be turned into memory by reload if necessary.
6893 We used to force a register so that loop.c could see it. But
6894 this does not allow gen_* patterns to perform optimizations with
6895 the constants. It also produces two insns in cases like "x = 1.0;".
6896 On most machines, floating-point constants are not permitted in
6897 many insns, so we'd end up copying it to a register in any case.
6899 Now, we do the copying in expand_binop, if appropriate. */
6900 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6901 TYPE_MODE (TREE_TYPE (exp)));
6905 temp = output_constant_def (exp, 1);
6907 /* temp contains a constant address.
6908 On RISC machines where a constant address isn't valid,
6909 make some insns to get that address into a register. */
6910 if (modifier != EXPAND_CONST_ADDRESS
6911 && modifier != EXPAND_INITIALIZER
6912 && modifier != EXPAND_SUM
6913 && (! memory_address_p (mode, XEXP (temp, 0))
6914 || flag_force_addr))
6915 return replace_equiv_address (temp,
6916 copy_rtx (XEXP (temp, 0)));
6919 case EXPR_WITH_FILE_LOCATION:
6922 const char *saved_input_filename = input_filename;
6923 int saved_lineno = lineno;
6924 input_filename = EXPR_WFL_FILENAME (exp);
6925 lineno = EXPR_WFL_LINENO (exp);
6926 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6927 emit_line_note (input_filename, lineno);
6928 /* Possibly avoid switching back and forth here. */
6929 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6930 input_filename = saved_input_filename;
6931 lineno = saved_lineno;
6936 context = decl_function_context (exp);
6938 /* If this SAVE_EXPR was at global context, assume we are an
6939 initialization function and move it into our context. */
6941 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6943 /* We treat inline_function_decl as an alias for the current function
6944 because that is the inline function whose vars, types, etc.
6945 are being merged into the current function.
6946 See expand_inline_function. */
6947 if (context == current_function_decl || context == inline_function_decl)
6950 /* If this is non-local, handle it. */
6953 /* The following call just exists to abort if the context is
6954 not of a containing function. */
6955 find_function_data (context);
6957 temp = SAVE_EXPR_RTL (exp);
6958 if (temp && GET_CODE (temp) == REG)
6960 put_var_into_stack (exp, /*rescan=*/true);
6961 temp = SAVE_EXPR_RTL (exp);
if (temp == 0 || GET_CODE (temp) != MEM)
abort ();
return
6966 replace_equiv_address (temp,
6967 fix_lexical_addr (XEXP (temp, 0), exp));
6969 if (SAVE_EXPR_RTL (exp) == 0)
6971 if (mode == VOIDmode)
6974 temp = assign_temp (build_qualified_type (type,
6976 | TYPE_QUAL_CONST)),
6979 SAVE_EXPR_RTL (exp) = temp;
6980 if (!optimize && GET_CODE (temp) == REG)
6981 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6984 /* If the mode of TEMP does not match that of the expression, it
6985 must be a promoted value. We pass store_expr a SUBREG of the
wanted mode but mark it so that we know that it was already
extended.  */
6989 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6991 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6992 promote_mode (type, mode, &unsignedp, 0);
6993 SUBREG_PROMOTED_VAR_P (temp) = 1;
6994 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6997 if (temp == const0_rtx)
6998 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
else
store_expr (TREE_OPERAND (exp, 0), temp,
7001 modifier == EXPAND_STACK_PARM ? 2 : 0);
7003 TREE_USED (exp) = 1;
7006 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
7007 must be a promoted value. We return a SUBREG of the wanted mode,
7008 but mark it so that we know that it was already extended. */
7010 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
7011 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
7013 /* Compute the signedness and make the proper SUBREG. */
7014 promote_mode (type, mode, &unsignedp, 0);
7015 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
7016 SUBREG_PROMOTED_VAR_P (temp) = 1;
7017 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
7021 return SAVE_EXPR_RTL (exp);
7026 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7027 TREE_OPERAND (exp, 0)
7028 = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
7032 case PLACEHOLDER_EXPR:
7034 tree old_list = placeholder_list;
7035 tree placeholder_expr = 0;
7037 exp = find_placeholder (exp, &placeholder_expr);
7041 placeholder_list = TREE_CHAIN (placeholder_expr);
7042 temp = expand_expr (exp, original_target, tmode, modifier);
7043 placeholder_list = old_list;
7047 case WITH_RECORD_EXPR:
7048 /* Put the object on the placeholder list, expand our first operand,
7049 and pop the list. */
7050 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
7052 target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
7054 placeholder_list = TREE_CHAIN (placeholder_list);
7058 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
7059 expand_goto (TREE_OPERAND (exp, 0));
7061 expand_computed_goto (TREE_OPERAND (exp, 0));
7065 expand_exit_loop_if_false (NULL,
7066 invert_truthvalue (TREE_OPERAND (exp, 0)));
7069 case LABELED_BLOCK_EXPR:
7070 if (LABELED_BLOCK_BODY (exp))
7071 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
7072 /* Should perhaps use expand_label, but this is simpler and safer. */
7073 do_pending_stack_adjust ();
7074 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
7077 case EXIT_BLOCK_EXPR:
7078 if (EXIT_BLOCK_RETURN (exp))
7079 sorry ("returned value in block_exit_expr");
7080 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
7085 expand_start_loop (1);
7086 expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
7094 tree vars = TREE_OPERAND (exp, 0);
7096 /* Need to open a binding contour here because
7097 if there are any cleanups they must be contained here. */
7098 expand_start_bindings (2);
7100 /* Mark the corresponding BLOCK for output in its proper place. */
7101 if (TREE_OPERAND (exp, 2) != 0
7102 && ! TREE_USED (TREE_OPERAND (exp, 2)))
7103 (*lang_hooks.decls.insert_block) (TREE_OPERAND (exp, 2));
7105 /* If VARS have not yet been expanded, expand them now. */
7108 if (!DECL_RTL_SET_P (vars))
7110 expand_decl_init (vars);
7111 vars = TREE_CHAIN (vars);
7114 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
7116 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
7122 if (RTL_EXPR_SEQUENCE (exp))
7124 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
7126 emit_insn (RTL_EXPR_SEQUENCE (exp));
7127 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
7129 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
7130 free_temps_for_rtl_expr (exp);
7131 return RTL_EXPR_RTL (exp);
/* If we don't need the result, just ensure we evaluate any
subexpressions.  */
7140 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
7141 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
7146 /* All elts simple constants => refer to a constant in memory. But
7147 if this is a non-BLKmode mode, let it store a field at a time
7148 since that should make a CONST_INT or CONST_DOUBLE when we
7149 fold. Likewise, if we have a target we can use, it is best to
7150 store directly into the target unless the type is large enough
7151 that memcpy will be used. If we are making an initializer and
7152 all operands are constant, put it in memory as well.
FIXME: Avoid trying to fill vector constructors piecemeal.
Output them with output_constant_def below unless we're sure
they're zeros.  This should go away when vector initializers
are treated like VECTOR_CST instead of arrays.  */
7159 else if ((TREE_STATIC (exp)
7160 && ((mode == BLKmode
7161 && ! (target != 0 && safe_from_p (target, exp, 1)))
7162 || TREE_ADDRESSABLE (exp)
7163 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7164 && (! MOVE_BY_PIECES_P
7165 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7167 && ((TREE_CODE (type) == VECTOR_TYPE
7168 && !is_zeros_p (exp))
7169 || ! mostly_zeros_p (exp)))))
7170 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
7172 rtx constructor = output_constant_def (exp, 1);
7174 if (modifier != EXPAND_CONST_ADDRESS
7175 && modifier != EXPAND_INITIALIZER
7176 && modifier != EXPAND_SUM)
7177 constructor = validize_mem (constructor);
7183 /* Handle calls that pass values in multiple non-contiguous
7184 locations. The Irix 6 ABI has examples of this. */
7185 if (target == 0 || ! safe_from_p (target, exp, 1)
7186 || GET_CODE (target) == PARALLEL
7187 || modifier == EXPAND_STACK_PARM)
7189 = assign_temp (build_qualified_type (type,
7191 | (TREE_READONLY (exp)
7192 * TYPE_QUAL_CONST))),
7193 0, TREE_ADDRESSABLE (exp), 1);
7195 store_constructor (exp, target, 0, int_expr_size (exp));
7201 tree exp1 = TREE_OPERAND (exp, 0);
7203 tree string = string_constant (exp1, &index);
7205 /* Try to optimize reads from const strings. */
7207 && TREE_CODE (string) == STRING_CST
7208 && TREE_CODE (index) == INTEGER_CST
7209 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
7210 && GET_MODE_CLASS (mode) == MODE_INT
7211 && GET_MODE_SIZE (mode) == 1
7212 && modifier != EXPAND_WRITE)
7213 return gen_int_mode (TREE_STRING_POINTER (string)
7214 [TREE_INT_CST_LOW (index)], mode);
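/* E.g., a read of "abc"[1] is folded here to (const_int 98), the
value of 'b', without emitting any memory reference.  */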
7216 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
7217 op0 = memory_address (mode, op0);
7218 temp = gen_rtx_MEM (mode, op0);
7219 set_mem_attributes (temp, exp, 0);
7221 /* If we are writing to this object and its type is a record with
7222 readonly fields, we must mark it as readonly so it will
7223 conflict with readonly references to those fields. */
7224 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
7225 RTX_UNCHANGING_P (temp) = 1;
7231 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
7235 tree array = TREE_OPERAND (exp, 0);
7236 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
7237 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
7238 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
7241 /* Optimize the special-case of a zero lower bound.
7243 We convert the low_bound to sizetype to avoid some problems
7244 with constant folding. (E.g. suppose the lower bound is 1,
7245 and its mode is QI. Without the conversion, (ARRAY
7246 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
7247 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
7249 if (! integer_zerop (low_bound))
7250 index = size_diffop (index, convert (sizetype, low_bound));
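/* E.g., for an array declared with bounds [5..10], a reference to
element 7 becomes index 7 - 5 = 2 into the underlying storage.  */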
7252 /* Fold an expression like: "foo"[2].
7253 This is not done in fold so it won't happen inside &.
7254 Don't fold if this is for wide characters since it's too
7255 difficult to do correctly and this is a very rare case. */
7257 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
7258 && TREE_CODE (array) == STRING_CST
7259 && TREE_CODE (index) == INTEGER_CST
7260 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
7261 && GET_MODE_CLASS (mode) == MODE_INT
7262 && GET_MODE_SIZE (mode) == 1)
7263 return gen_int_mode (TREE_STRING_POINTER (array)
7264 [TREE_INT_CST_LOW (index)], mode);
7266 /* If this is a constant index into a constant array,
7267 just get the value from the array. Handle both the cases when
7268 we have an explicit constructor and when our operand is a variable
7269 that was declared const. */
7271 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
7272 && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
7273 && TREE_CODE (index) == INTEGER_CST
7274 && 0 > compare_tree_int (index,
7275 list_length (CONSTRUCTOR_ELTS
7276 (TREE_OPERAND (exp, 0)))))
7280 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7281 i = TREE_INT_CST_LOW (index);
7282 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
7286 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
7290 else if (optimize >= 1
7291 && modifier != EXPAND_CONST_ADDRESS
7292 && modifier != EXPAND_INITIALIZER
7293 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7294 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7295 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
7297 if (TREE_CODE (index) == INTEGER_CST)
7299 tree init = DECL_INITIAL (array);
7301 if (TREE_CODE (init) == CONSTRUCTOR)
7305 for (elem = CONSTRUCTOR_ELTS (init);
7307 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
7308 elem = TREE_CHAIN (elem))
7311 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
7312 return expand_expr (fold (TREE_VALUE (elem)), target,
7315 else if (TREE_CODE (init) == STRING_CST
7316 && 0 > compare_tree_int (index,
7317 TREE_STRING_LENGTH (init)))
7319 tree type = TREE_TYPE (TREE_TYPE (init));
7320 enum machine_mode mode = TYPE_MODE (type);
7322 if (GET_MODE_CLASS (mode) == MODE_INT
7323 && GET_MODE_SIZE (mode) == 1)
7324 return gen_int_mode (TREE_STRING_POINTER (init)
7325 [TREE_INT_CST_LOW (index)], mode);
7330 goto normal_inner_ref;
7333 /* If the operand is a CONSTRUCTOR, we can just extract the
7334 appropriate field if it is present. */
7335 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
7339 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
7340 elt = TREE_CHAIN (elt))
7341 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
7342 /* We can normally use the value of the field in the
7343 CONSTRUCTOR. However, if this is a bitfield in
7344 an integral mode that we can fit in a HOST_WIDE_INT,
7345 we must mask only the number of bits in the bitfield,
7346 since this is done implicitly by the constructor. If
7347 the bitfield does not meet either of those conditions,
7348 we can't do this optimization. */
7349 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
7350 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
7352 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
7353 <= HOST_BITS_PER_WIDE_INT))))
7355 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
7356 && modifier == EXPAND_STACK_PARM)
7358 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
7359 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
7361 HOST_WIDE_INT bitsize
7362 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
7363 enum machine_mode imode
7364 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
7366 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
7368 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7369 op0 = expand_and (imode, op0, op1, target);
7374 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
7377 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7379 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7387 goto normal_inner_ref;
7390 case ARRAY_RANGE_REF:
7393 enum machine_mode mode1;
7394 HOST_WIDE_INT bitsize, bitpos;
7397 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7398 &mode1, &unsignedp, &volatilep);
7401 /* If we got back the original object, something is wrong. Perhaps
7402 we are evaluating an expression too early. In any event, don't
7403 infinitely recurse. */
/* If TEM's type is a union of variable size, pass TARGET to the inner
computation, since it will need a temporary and TARGET is known
to suffice for that.  This occurs in unchecked conversion in Ada.  */
7413 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7414 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7416 && modifier != EXPAND_STACK_PARM
7417 ? target : NULL_RTX),
7419 (modifier == EXPAND_INITIALIZER
7420 || modifier == EXPAND_CONST_ADDRESS
7421 || modifier == EXPAND_STACK_PARM)
7422 ? modifier : EXPAND_NORMAL);
/* If this is a constant, put it into a register if it is a
legitimate constant and OFFSET is 0; otherwise put it into memory.  */
7426 if (CONSTANT_P (op0))
7428 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7429 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7431 op0 = force_reg (mode, op0);
7433 op0 = validize_mem (force_const_mem (mode, op0));
7438 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7441 /* If this object is in a register, put it into memory.
7442 This case can't occur in C, but can in Ada if we have
7443 unchecked conversion of an expression from a scalar type to
7444 an array or record type. */
7445 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7446 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
7448 /* If the operand is a SAVE_EXPR, we can deal with this by
7449 forcing the SAVE_EXPR into memory. */
7450 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7452 put_var_into_stack (TREE_OPERAND (exp, 0),
7454 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7459 = build_qualified_type (TREE_TYPE (tem),
7460 (TYPE_QUALS (TREE_TYPE (tem))
7461 | TYPE_QUAL_CONST));
7462 rtx memloc = assign_temp (nt, 1, 1, 1);
7464 emit_move_insn (memloc, op0);
7469 if (GET_CODE (op0) != MEM)
7472 #ifdef POINTERS_EXTEND_UNSIGNED
7473 if (GET_MODE (offset_rtx) != Pmode)
7474 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7476 if (GET_MODE (offset_rtx) != ptr_mode)
7477 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
/* A constant address in OP0 can have VOIDmode; we must not try
to call force_reg in that case, so avoid it.  */
7482 if (GET_CODE (op0) == MEM
7483 && GET_MODE (op0) == BLKmode
7484 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7486 && (bitpos % bitsize) == 0
7487 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7488 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7490 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7494 op0 = offset_address (op0, offset_rtx,
7495 highest_pow2_factor (offset));
7498 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7499 record its alignment as BIGGEST_ALIGNMENT. */
7500 if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
7501 && is_aligning_offset (offset, tem))
7502 set_mem_align (op0, BIGGEST_ALIGNMENT);
7504 /* Don't forget about volatility even if this is a bitfield. */
7505 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7507 if (op0 == orig_op0)
7508 op0 = copy_rtx (op0);
7510 MEM_VOLATILE_P (op0) = 1;
7513 /* The following code doesn't handle CONCAT.
7514 Assume only bitpos == 0 can be used for CONCAT, due to
one-element arrays having the same mode as their element.  */
7516 if (GET_CODE (op0) == CONCAT)
7518 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
7523 /* In cases where an aligned union has an unaligned object
7524 as a field, we might be extracting a BLKmode value from
7525 an integer-mode (e.g., SImode) object. Handle this case
7526 by doing the extract into an object as wide as the field
7527 (which we know to be the width of a basic mode), then
7528 storing into memory, and changing the mode to BLKmode. */
7529 if (mode1 == VOIDmode
7530 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7531 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7532 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7533 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7534 && modifier != EXPAND_CONST_ADDRESS
7535 && modifier != EXPAND_INITIALIZER)
7536 /* If the field isn't aligned enough to fetch as a memref,
7537 fetch it as a bit field. */
7538 || (mode1 != BLKmode
7539 && ((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7540 && SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0)))
7541 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
7542 /* If the type and the field are a constant size and the
7543 size of the type isn't the same size as the bitfield,
7544 we must use bitfield operations. */
7546 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7548 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7551 enum machine_mode ext_mode = mode;
7553 if (ext_mode == BLKmode
7554 && ! (target != 0 && GET_CODE (op0) == MEM
7555 && GET_CODE (target) == MEM
7556 && bitpos % BITS_PER_UNIT == 0))
7557 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7559 if (ext_mode == BLKmode)
7561 /* In this case, BITPOS must start at a byte boundary and
7562 TARGET, if specified, must be a MEM. */
7563 if (GET_CODE (op0) != MEM
7564 || (target != 0 && GET_CODE (target) != MEM)
7565 || bitpos % BITS_PER_UNIT != 0)
7568 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
7570 target = assign_temp (type, 0, 1, 1);
7572 emit_block_move (target, op0,
7573 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7575 (modifier == EXPAND_STACK_PARM
7576 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7581 op0 = validize_mem (op0);
7583 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7584 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7586 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7587 (modifier == EXPAND_STACK_PARM
7588 ? NULL_RTX : target),
7590 int_size_in_bytes (TREE_TYPE (tem)));
7592 /* If the result is a record type and BITSIZE is narrower than
7593 the mode of OP0, an integral mode, and this is a big endian
7594 machine, we must put the field into the high-order bits. */
7595 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7596 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7597 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7598 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7599 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7603 if (mode == BLKmode)
7605 rtx new = assign_temp (build_qualified_type
7606 ((*lang_hooks.types.type_for_mode)
7608 TYPE_QUAL_CONST), 0, 1, 1);
7610 emit_move_insn (new, op0);
7611 op0 = copy_rtx (new);
7612 PUT_MODE (op0, BLKmode);
7613 set_mem_attributes (op0, exp, 1);
/* If the result is BLKmode, use that to access the object
now as well.  */
7621 if (mode == BLKmode)
7624 /* Get a reference to just this component. */
7625 if (modifier == EXPAND_CONST_ADDRESS
7626 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7627 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7629 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7631 if (op0 == orig_op0)
7632 op0 = copy_rtx (op0);
7634 set_mem_attributes (op0, exp, 0);
7635 if (GET_CODE (XEXP (op0, 0)) == REG)
7636 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7638 MEM_VOLATILE_P (op0) |= volatilep;
7639 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7640 || modifier == EXPAND_CONST_ADDRESS
7641 || modifier == EXPAND_INITIALIZER)
7643 else if (target == 0)
7644 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7646 convert_move (target, op0, unsignedp);
7652 rtx insn, before = get_last_insn (), vtbl_ref;
7654 /* Evaluate the interior expression. */
7655 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7658 /* Get or create an instruction off which to hang a note. */
7659 if (REG_P (subtarget))
7662 insn = get_last_insn ();
7665 if (! INSN_P (insn))
7666 insn = prev_nonnote_insn (insn);
7670 target = gen_reg_rtx (GET_MODE (subtarget));
7671 insn = emit_move_insn (target, subtarget);
7674 /* Collect the data for the note. */
7675 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7676 vtbl_ref = plus_constant (vtbl_ref,
7677 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7678 /* Discard the initial CONST that was added. */
7679 vtbl_ref = XEXP (vtbl_ref, 0);
7682 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7687 /* Intended for a reference to a buffer of a file-object in Pascal.
7688 But it's not certain that a special tree code will really be
7689 necessary for these. INDIRECT_REF might work for them. */
7695 /* Pascal set IN expression.
7698 rlo = set_low - (set_low%bits_per_word);
7699 the_word = set [ (index - rlo)/bits_per_word ];
7700 bit_index = index % bits_per_word;
7701 bitmask = 1 << bit_index;
7702 return !!(the_word & bitmask); */
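/* E.g., with bits_per_word == 8, set_low == 3 and index == 11:
rlo = 0, the_word = set[(11-0)/8] = set[1], bit_index = 3, and we
test the_word against bitmask 1<<3.  */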
7704 tree set = TREE_OPERAND (exp, 0);
7705 tree index = TREE_OPERAND (exp, 1);
7706 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7707 tree set_type = TREE_TYPE (set);
7708 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7709 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7710 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7711 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7712 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7713 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7714 rtx setaddr = XEXP (setval, 0);
7715 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7717 rtx diff, quo, rem, addr, bit, result;
7719 /* If domain is empty, answer is no. Likewise if index is constant
7720 and out of bounds. */
7721 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7722 && TREE_CODE (set_low_bound) == INTEGER_CST
7723 && tree_int_cst_lt (set_high_bound, set_low_bound))
7724 || (TREE_CODE (index) == INTEGER_CST
7725 && TREE_CODE (set_low_bound) == INTEGER_CST
7726 && tree_int_cst_lt (index, set_low_bound))
7727 || (TREE_CODE (set_high_bound) == INTEGER_CST
7728 && TREE_CODE (index) == INTEGER_CST
7729 && tree_int_cst_lt (set_high_bound, index))))
7733 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7735 /* If we get here, we have to generate the code for both cases
7736 (in range and out of range). */
7738 op0 = gen_label_rtx ();
7739 op1 = gen_label_rtx ();
7741 if (! (GET_CODE (index_val) == CONST_INT
7742 && GET_CODE (lo_r) == CONST_INT))
7743 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7744 GET_MODE (index_val), iunsignedp, op1);
7746 if (! (GET_CODE (index_val) == CONST_INT
7747 && GET_CODE (hi_r) == CONST_INT))
7748 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7749 GET_MODE (index_val), iunsignedp, op1);
/* Calculate the element number of bit zero in the first word
of the set.  */
7753 if (GET_CODE (lo_r) == CONST_INT)
7754 rlow = GEN_INT (INTVAL (lo_r)
7755 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7757 rlow = expand_binop (index_mode, and_optab, lo_r,
7758 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7759 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7761 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7762 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7764 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7765 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7766 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7767 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7769 addr = memory_address (byte_mode,
7770 expand_binop (index_mode, add_optab, diff,
7771 setaddr, NULL_RTX, iunsignedp,
7774 /* Extract the bit we want to examine. */
7775 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7776 gen_rtx_MEM (byte_mode, addr),
7777 make_tree (TREE_TYPE (index), rem),
7779 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7780 GET_MODE (target) == byte_mode ? target : 0,
7781 1, OPTAB_LIB_WIDEN);
7783 if (result != target)
7784 convert_move (target, result, 1);
7786 /* Output the code to handle the out-of-range case. */
7789 emit_move_insn (target, const0_rtx);
7794 case WITH_CLEANUP_EXPR:
7795 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7797 WITH_CLEANUP_EXPR_RTL (exp)
7798 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7799 expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
7800 CLEANUP_EH_ONLY (exp));
7802 /* That's it for this cleanup. */
7803 TREE_OPERAND (exp, 1) = 0;
7805 return WITH_CLEANUP_EXPR_RTL (exp);
7807 case CLEANUP_POINT_EXPR:
7809 /* Start a new binding layer that will keep track of all cleanup
7810 actions to be performed. */
7811 expand_start_bindings (2);
7813 target_temp_slot_level = temp_slot_level;
7815 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7816 /* If we're going to use this value, load it up now. */
if (! ignore)
op0 = force_not_mem (op0);
7819 preserve_temp_slots (op0);
7820 expand_end_bindings (NULL_TREE, 0, 0);
7825 /* Check for a built-in function. */
7826 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7827 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7829 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7831 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7832 == BUILT_IN_FRONTEND)
7833 return (*lang_hooks.expand_expr) (exp, original_target,
7836 return expand_builtin (exp, target, subtarget, tmode, ignore);
7839 return expand_call (exp, target, ignore);
7841 case NON_LVALUE_EXPR:
7844 case REFERENCE_EXPR:
7845 if (TREE_OPERAND (exp, 0) == error_mark_node)
7848 if (TREE_CODE (type) == UNION_TYPE)
7850 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7852 /* If both input and output are BLKmode, this conversion isn't doing
anything except possibly changing memory attributes.  */
7854 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7856 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7859 result = copy_rtx (result);
7860 set_mem_attributes (result, exp, 0);
7865 target = assign_temp (type, 0, 1, 1);
7867 if (GET_CODE (target) == MEM)
7868 /* Store data into beginning of memory target. */
7869 store_expr (TREE_OPERAND (exp, 0),
7870 adjust_address (target, TYPE_MODE (valtype), 0),
7871 modifier == EXPAND_STACK_PARM ? 2 : 0);
7873 else if (GET_CODE (target) == REG)
7874 /* Store this field into a union of the proper type. */
7875 store_field (target,
7876 MIN ((int_size_in_bytes (TREE_TYPE
7877 (TREE_OPERAND (exp, 0)))
7879 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7880 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7881 VOIDmode, 0, type, 0);
7885 /* Return the entire union. */
7889 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7891 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7894 /* If the signedness of the conversion differs and OP0 is
7895 a promoted SUBREG, clear that indication since we now
7896 have to do the proper extension. */
7897 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7898 && GET_CODE (op0) == SUBREG)
7899 SUBREG_PROMOTED_VAR_P (op0) = 0;
7904 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7905 if (GET_MODE (op0) == mode)
7908 /* If OP0 is a constant, just convert it into the proper mode. */
7909 if (CONSTANT_P (op0))
7911 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7912 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7914 if (modifier == EXPAND_INITIALIZER)
7915 return simplify_gen_subreg (mode, op0, inner_mode,
7916 subreg_lowpart_offset (mode,
7919 return convert_modes (mode, inner_mode, op0,
7920 TREE_UNSIGNED (inner_type));
7923 if (modifier == EXPAND_INITIALIZER)
7924 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7928 convert_to_mode (mode, op0,
7929 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7931 convert_move (target, op0,
7932 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7935 case VIEW_CONVERT_EXPR:
7936 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7938 /* If the input and output modes are both the same, we are done.
7939 Otherwise, if neither mode is BLKmode and both are within a word, we
7940 can use gen_lowpart. If neither is true, make sure the operand is
7941 in memory and convert the MEM to the new mode. */
7942 if (TYPE_MODE (type) == GET_MODE (op0))
7944 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7945 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7946 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7947 op0 = gen_lowpart (TYPE_MODE (type), op0);
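/* E.g., a VIEW_CONVERT_EXPR from float to a 32-bit integer (SFmode to
SImode on typical 32-bit-or-wider targets) is handled by gen_lowpart
above without going through memory.  */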
7948 else if (GET_CODE (op0) != MEM)
7950 /* If the operand is not a MEM, force it into memory. Since we
are going to be changing the mode of the MEM, don't call
7952 force_const_mem for constants because we don't allow pool
7953 constants to change mode. */
7954 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7956 if (TREE_ADDRESSABLE (exp))
7959 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7961 = assign_stack_temp_for_type
7962 (TYPE_MODE (inner_type),
7963 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7965 emit_move_insn (target, op0);
7969 /* At this point, OP0 is in the correct mode. If the output type is such
7970 that the operand is known to be aligned, indicate that it is.
Otherwise, we need only be concerned about alignment for non-BLKmode
results.  */
7973 if (GET_CODE (op0) == MEM)
7975 op0 = copy_rtx (op0);
7977 if (TYPE_ALIGN_OK (type))
7978 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7979 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7980 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7982 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7983 HOST_WIDE_INT temp_size
7984 = MAX (int_size_in_bytes (inner_type),
7985 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7986 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7987 temp_size, 0, type);
7988 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7990 if (TREE_ADDRESSABLE (exp))
7993 if (GET_MODE (op0) == BLKmode)
7994 emit_block_move (new_with_op0_mode, op0,
7995 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7996 (modifier == EXPAND_STACK_PARM
7997 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7999 emit_move_insn (new_with_op0_mode, op0);
8004 op0 = adjust_address (op0, TYPE_MODE (type), 0);
8010 this_optab = ! unsignedp && flag_trapv
8011 && (GET_MODE_CLASS (mode) == MODE_INT)
8012 ? addv_optab : add_optab;
8014 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
8015 something else, make sure we add the register to the constant and
8016 then to the other thing. This case can occur during strength
8017 reduction and doing it this way will produce better code if the
8018 frame pointer or argument pointer is eliminated.
8020 fold-const.c will ensure that the constant is always in the inner
8021 PLUS_EXPR, so the only case we need to do anything about is if
8022 sp, ap, or fp is our second argument, in which case we must swap
8023 the innermost first argument and our second argument. */
8025 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
8026 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
8027 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
8028 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
8029 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
8030 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
8032 tree t = TREE_OPERAND (exp, 1);
8034 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
8035 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
8038 /* If the result is to be ptr_mode and we are adding an integer to
8039 something, we might be forming a constant. So try to use
8040 plus_constant. If it produces a sum and we can't accept it,
8041 use force_operand. This allows P = &ARR[const] to generate
efficient code on machines where a SYMBOL_REF is not a valid
address.
8045 If this is an EXPAND_SUM call, always return the sum. */
8046 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8047 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8049 if (modifier == EXPAND_STACK_PARM)
8051 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
8052 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8053 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
8057 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
8059 /* Use immed_double_const to ensure that the constant is
8060 truncated according to the mode of OP1, then sign extended
8061 to a HOST_WIDE_INT. Using the constant directly can result
8062 in non-canonical RTL in a 64x32 cross compile. */
8064 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
8066 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
8067 op1 = plus_constant (op1, INTVAL (constant_part));
8068 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8069 op1 = force_operand (op1, target);
8073 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8074 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
8075 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
8079 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8080 (modifier == EXPAND_INITIALIZER
8081 ? EXPAND_INITIALIZER : EXPAND_SUM));
8082 if (! CONSTANT_P (op0))
8084 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8085 VOIDmode, modifier);
8086 /* Don't go to both_summands if modifier
8087 says it's not right to return a PLUS. */
8088 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8092 /* Use immed_double_const to ensure that the constant is
8093 truncated according to the mode of OP1, then sign extended
8094 to a HOST_WIDE_INT. Using the constant directly can result
8095 in non-canonical RTL in a 64x32 cross compile. */
8097 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
8099 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
8100 op0 = plus_constant (op0, INTVAL (constant_part));
8101 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8102 op0 = force_operand (op0, target);
8107 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8110 /* No sense saving up arithmetic to be done
8111 if it's all in the wrong mode to form part of an address.
And force_operand won't know whether to sign-extend or
zero-extend.  */
8114 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8115 || mode != ptr_mode)
8117 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8118 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8119 if (op0 == const0_rtx)
8121 if (op1 == const0_rtx)
8126 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
8127 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
/* We come here from MINUS_EXPR when the second operand is a
constant.  */
both_summands:
8132 /* Make sure any term that's a sum with a constant comes last. */
8133 if (GET_CODE (op0) == PLUS
8134 && CONSTANT_P (XEXP (op0, 1)))
8140 /* If adding to a sum including a constant,
8141 associate it to put the constant outside. */
8142 if (GET_CODE (op1) == PLUS
8143 && CONSTANT_P (XEXP (op1, 1)))
8145 rtx constant_term = const0_rtx;
8147 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
8150 /* Ensure that MULT comes first if there is one. */
8151 else if (GET_CODE (op0) == MULT)
8152 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
8154 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
8156 /* Let's also eliminate constants from op0 if possible. */
8157 op0 = eliminate_constant_term (op0, &constant_term);
8159 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
8160 their sum should be a constant. Form it into OP1, since the
8161 result we want will then be OP0 + OP1. */
8163 temp = simplify_binary_operation (PLUS, mode, constant_term,
8168 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
8171 /* Put a constant term last and put a multiplication first. */
8172 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
8173 temp = op1, op1 = op0, op0 = temp;
8175 temp = simplify_binary_operation (PLUS, mode, op0, op1);
8176 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
8179 /* For initializers, we are allowed to return a MINUS of two
symbolic constants.  Here we handle all cases when both operands
are constant.  */
8182 /* Handle difference of two symbolic constants,
8183 for the sake of an initializer. */
8184 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8185 && really_constant_p (TREE_OPERAND (exp, 0))
8186 && really_constant_p (TREE_OPERAND (exp, 1)))
8188 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
8190 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
8193 /* If the last operand is a CONST_INT, use plus_constant of
8194 the negated constant. Else make the MINUS. */
8195 if (GET_CODE (op1) == CONST_INT)
8196 return plus_constant (op0, - INTVAL (op1));
8198 return gen_rtx_MINUS (mode, op0, op1);
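/* E.g., for an initializer like &X - 4, OP1 is (const_int 4) and we
return OP0 plus -4 via plus_constant rather than an explicit MINUS.  */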
8201 this_optab = ! unsignedp && flag_trapv
8202 && (GET_MODE_CLASS(mode) == MODE_INT)
8203 ? subv_optab : sub_optab;
8205 /* No sense saving up arithmetic to be done
8206 if it's all in the wrong mode to form part of an address.
And force_operand won't know whether to sign-extend or
zero-extend.  */
8209 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8210 || mode != ptr_mode)
8213 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8216 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
8217 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
8219 /* Convert A - const to A + (-const). */
8220 if (GET_CODE (op1) == CONST_INT)
op1 = negate_rtx (mode, op1);
goto both_summands;
8229 /* If first operand is constant, swap them.
8230 Thus the following special case checks need only
8231 check the second operand. */
8232 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
8234 tree t1 = TREE_OPERAND (exp, 0);
8235 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
8236 TREE_OPERAND (exp, 1) = t1;
8239 /* Attempt to return something suitable for generating an
8240 indexed address, for machines that support that. */
8242 if (modifier == EXPAND_SUM && mode == ptr_mode
8243 && host_integerp (TREE_OPERAND (exp, 1), 0))
8245 tree exp1 = TREE_OPERAND (exp, 1);
8247 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8250 /* If we knew for certain that this is arithmetic for an array
8251 reference, and we knew the bounds of the array, then we could
8252 apply the distributive law across (PLUS X C) for constant C.
8253 Without such knowledge, we risk overflowing the computation
8254 when both X and C are large, but X+C isn't. */
8255 /* ??? Could perhaps special-case EXP being unsigned and C being
8256 positive. In that case we are certain that X+C is no smaller
8257 than X and so the transformed expression will overflow iff the
8258 original would have. */
8260 if (GET_CODE (op0) != REG)
8261 op0 = force_operand (op0, NULL_RTX);
8262 if (GET_CODE (op0) != REG)
8263 op0 = copy_to_mode_reg (mode, op0);
8265 return gen_rtx_MULT (mode, op0,
8266 gen_int_mode (tree_low_cst (exp1, 0),
8267 TYPE_MODE (TREE_TYPE (exp1))));
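/* Sketch: for a subscript A[I] with 4-byte elements, this returns the
   bare (mult (reg I') (const_int 4)) unsimplified, letting the caller
   fold it into an indexed address on machines that support one instead
   of committing to a multiply instruction.  */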
8270 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8273 if (modifier == EXPAND_STACK_PARM)
8276 /* Check for multiplying things that have been extended
8277 from a narrower type. If this machine supports multiplying
8278 in that narrower type with a result in the desired type,
8279 do it that way, and avoid the explicit type-conversion. */
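/* Illustrative example (assuming 16-bit HImode and 32-bit SImode):
   for (int) s1 * (int) s2 with S1 and S2 of type short, a
   mulhisi3-style widening multiply computes the SImode product
   directly from the HImode operands, avoiding two explicit
   extensions.  */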
8280 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
8281 && TREE_CODE (type) == INTEGER_TYPE
8282 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8283 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8284 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8285 && int_fits_type_p (TREE_OPERAND (exp, 1),
8286 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8287 /* Don't use a widening multiply if a shift will do. */
8288 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8289 > HOST_BITS_PER_WIDE_INT)
8290 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
8292 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8293 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8295 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
8296 /* If both operands are extended, they must either both
8297 be zero-extended or both be sign-extended. */
8298 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8300 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
8302 enum machine_mode innermode
8303 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
8304 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8305 ? smul_widen_optab : umul_widen_optab);
8306 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8307 ? umul_widen_optab : smul_widen_optab);
8308 if (mode == GET_MODE_WIDER_MODE (innermode))
8310 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
8312 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8313 NULL_RTX, VOIDmode, 0);
8314 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8315 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8318 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8319 NULL_RTX, VOIDmode, 0);
8322 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
8323 && innermode == word_mode)
8326 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8327 NULL_RTX, VOIDmode, 0);
8328 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8329 op1 = convert_modes (innermode, mode,
8330 expand_expr (TREE_OPERAND (exp, 1),
8331 NULL_RTX, VOIDmode, 0),
8334 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8335 NULL_RTX, VOIDmode, 0);
8336 temp = expand_binop (mode, other_optab, op0, op1, target,
8337 unsignedp, OPTAB_LIB_WIDEN);
8338 htem = expand_mult_highpart_adjust (innermode,
8339 gen_highpart (innermode, temp),
8341 gen_highpart (innermode, temp),
8343 emit_move_insn (gen_highpart (innermode, temp), htem);
8348 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8349 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8350 return expand_mult (mode, op0, op1, target, unsignedp);
8352 case TRUNC_DIV_EXPR:
8353 case FLOOR_DIV_EXPR:
8355 case ROUND_DIV_EXPR:
8356 case EXACT_DIV_EXPR:
8357 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8359 if (modifier == EXPAND_STACK_PARM)
8361 /* Possible optimization: compute the dividend with EXPAND_SUM
8362 then, if the divisor is constant, we can optimize the case
8363 where some terms of the dividend have coefficients divisible by it. */
8364 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8365 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8366 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8369 /* Emit a/b as a*(1/b). Later we may manage to CSE the reciprocal, saving an
8370 expensive divide. If not, combine will rebuild the original computation. */
8372 if (flag_unsafe_math_optimizations && optimize && !optimize_size
8373 && TREE_CODE (type) == REAL_TYPE
8374 && !real_onep (TREE_OPERAND (exp, 0)))
8375 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
8376 build (RDIV_EXPR, type,
8377 build_real (type, dconst1),
8378 TREE_OPERAND (exp, 1))),
8379 target, tmode, modifier);
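/* E.g. under -funsafe-math-optimizations, "a / b" becomes
   "a * (1.0 / b)"; when several divisions share the divisor B, CSE
   can reuse the single reciprocal, trading expensive divides for
   multiplies.  */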
8380 this_optab = sdiv_optab;
8383 case TRUNC_MOD_EXPR:
8384 case FLOOR_MOD_EXPR:
8386 case ROUND_MOD_EXPR:
8387 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8389 if (modifier == EXPAND_STACK_PARM)
8391 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8392 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8393 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8395 case FIX_ROUND_EXPR:
8396 case FIX_FLOOR_EXPR:
8398 abort (); /* Not used for C. */
8400 case FIX_TRUNC_EXPR:
8401 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8402 if (target == 0 || modifier == EXPAND_STACK_PARM)
8403 target = gen_reg_rtx (mode);
8404 expand_fix (target, op0, unsignedp);
8408 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8409 if (target == 0 || modifier == EXPAND_STACK_PARM)
8410 target = gen_reg_rtx (mode);
8411 /* expand_float can't figure out what to do if FROM has VOIDmode.
8412 So give it the correct mode. With -O, cse will optimize this. */
8413 if (GET_MODE (op0) == VOIDmode)
8414 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8416 expand_float (target, op0,
8417 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8421 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8422 if (modifier == EXPAND_STACK_PARM)
8424 temp = expand_unop (mode,
8425 ! unsignedp && flag_trapv
8426 && (GET_MODE_CLASS(mode) == MODE_INT)
8427 ? negv_optab : neg_optab, op0, target, 0);
8433 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8434 if (modifier == EXPAND_STACK_PARM)
8437 /* Handle complex values specially. */
8438 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
8439 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
8440 return expand_complex_abs (mode, op0, target, unsignedp);
8442 /* Unsigned abs is simply the operand. Testing here means we don't
8443 risk generating incorrect code below. */
8444 if (TREE_UNSIGNED (type))
8447 return expand_abs (mode, op0, target, unsignedp,
8448 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8452 target = original_target;
8454 || modifier == EXPAND_STACK_PARM
8455 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
8456 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
8457 || GET_MODE (target) != mode
8458 || (GET_CODE (target) == REG
8459 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8460 target = gen_reg_rtx (mode);
8461 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8462 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8464 /* First try to do it with a special MIN or MAX instruction.
8465 If that does not win, use a conditional jump to select the proper value. */
8467 this_optab = (TREE_UNSIGNED (type)
8468 ? (code == MIN_EXPR ? umin_optab : umax_optab)
8469 : (code == MIN_EXPR ? smin_optab : smax_optab));
8471 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8476 /* At this point, a MEM target is no longer useful; we will get better code without it. */
8479 if (GET_CODE (target) == MEM)
8480 target = gen_reg_rtx (mode);
8483 emit_move_insn (target, op0);
8485 op0 = gen_label_rtx ();
8487 /* If this mode is an integer too wide to compare properly,
8488 compare word by word. Rely on cse to optimize constant cases. */
8489 if (GET_MODE_CLASS (mode) == MODE_INT
8490 && ! can_compare_p (GE, mode, ccp_jump))
8492 if (code == MAX_EXPR)
8493 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8494 target, op1, NULL_RTX, op0);
8496 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8497 op1, target, NULL_RTX, op0);
8501 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
8502 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
8503 unsignedp, mode, NULL_RTX, NULL_RTX,
8506 emit_move_insn (target, op1);
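/* A sketch of the fallback branch sequence emitted above for MAX_EXPR:
     target = op0;
     if (target >= op1) goto lab;   (signed or unsigned per the type)
     target = op1;
   lab:
   MIN_EXPR is identical with the comparison reversed.  */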
8511 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8512 if (modifier == EXPAND_STACK_PARM)
8514 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8520 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8521 if (modifier == EXPAND_STACK_PARM)
8523 temp = expand_unop (mode, ffs_optab, op0, target, 1);
8529 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8530 temp = expand_unop (mode, clz_optab, op0, target, 1);
8536 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8537 temp = expand_unop (mode, ctz_optab, op0, target, 1);
8543 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8544 temp = expand_unop (mode, popcount_optab, op0, target, 1);
8550 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8551 temp = expand_unop (mode, parity_optab, op0, target, 1);
8556 /* ??? Can optimize bitwise operations with one arg constant.
8557 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8558 and (a bitwise1 b) bitwise2 b (etc)
8559 but that is probably not worthwhile. */
8561 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8562 boolean values when we want in all cases to compute both of them. In
8563 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8564 as actual zero-or-1 values and then bitwise anding. In cases where
8565 there cannot be any side effects, better code would be made by
8566 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8567 how to recognize those cases. */
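/* For example, "a && b" (TRUTH_ANDIF_EXPR) must skip evaluating B when
   A is false, whereas "a & b" on two 0-or-1 values (TRUTH_AND_EXPR)
   may evaluate both operands and AND the results, which is usually
   cheaper when neither operand has side effects.  */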
8569 case TRUTH_AND_EXPR:
8571 this_optab = and_optab;
8576 this_optab = ior_optab;
8579 case TRUTH_XOR_EXPR:
8581 this_optab = xor_optab;
8588 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8590 if (modifier == EXPAND_STACK_PARM)
8592 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8593 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8596 /* Could determine the answer when only additive constants differ. Also,
8597 the addition of one can be handled by changing the condition. */
8604 case UNORDERED_EXPR:
8611 temp = do_store_flag (exp,
8612 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8613 tmode != VOIDmode ? tmode : mode, 0);
8617 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8618 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8620 && GET_CODE (original_target) == REG
8621 && (GET_MODE (original_target)
8622 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8624 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8627 /* If temp is constant, we can just compute the result. */
8628 if (GET_CODE (temp) == CONST_INT)
8630 if (INTVAL (temp) != 0)
8631 emit_move_insn (target, const1_rtx);
8633 emit_move_insn (target, const0_rtx);
8638 if (temp != original_target)
8640 enum machine_mode mode1 = GET_MODE (temp);
8641 if (mode1 == VOIDmode)
8642 mode1 = tmode != VOIDmode ? tmode : mode;
8644 temp = copy_to_mode_reg (mode1, temp);
8647 op1 = gen_label_rtx ();
8648 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8649 GET_MODE (temp), unsignedp, op1);
8650 emit_move_insn (temp, const1_rtx);
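/* The sequence just emitted computes "foo != 0" without a store-flag
   insn: TEMP holds FOO; if it compares equal to zero the store is
   skipped, otherwise TEMP is overwritten with 1.  */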
8655 /* If no set-flag instruction, must generate a conditional
8656 store into a temporary variable. Drop through
8657 and handle this like && and ||. */
8659 case TRUTH_ANDIF_EXPR:
8660 case TRUTH_ORIF_EXPR:
8663 || modifier == EXPAND_STACK_PARM
8664 || ! safe_from_p (target, exp, 1)
8665 /* Make sure we don't have a hard reg (such as function's return
8666 value) live across basic blocks, if not optimizing. */
8667 || (!optimize && GET_CODE (target) == REG
8668 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8669 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8672 emit_clr_insn (target);
8674 op1 = gen_label_rtx ();
8675 jumpifnot (exp, op1);
8678 emit_0_to_1_insn (target);
8681 return ignore ? const0_rtx : target;
8683 case TRUTH_NOT_EXPR:
8684 if (modifier == EXPAND_STACK_PARM)
8686 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8687 /* The parser is careful to generate TRUTH_NOT_EXPR
8688 only with operands that are always zero or one. */
8689 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8690 target, 1, OPTAB_LIB_WIDEN);
8696 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8698 return expand_expr (TREE_OPERAND (exp, 1),
8699 (ignore ? const0_rtx : target),
8700 VOIDmode, modifier);
8703 /* If we would have a "singleton" (see below) were it not for a
8704 conversion in each arm, bring that conversion back out. */
8705 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8706 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8707 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8708 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8710 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8711 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8713 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8714 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8715 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8716 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8717 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8718 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8719 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8720 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8721 return expand_expr (build1 (NOP_EXPR, type,
8722 build (COND_EXPR, TREE_TYPE (iftrue),
8723 TREE_OPERAND (exp, 0),
8725 target, tmode, modifier);
8729 /* Note that COND_EXPRs whose type is a structure or union
8730 are required to be constructed to contain assignments of
8731 a temporary variable, so that we can evaluate them here
8732 for side effect only. If type is void, we must do likewise. */
8734 /* If an arm of the branch requires a cleanup,
8735 only that cleanup is performed. */
8738 tree binary_op = 0, unary_op = 0;
8740 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8741 convert it to our mode, if necessary. */
8742 if (integer_onep (TREE_OPERAND (exp, 1))
8743 && integer_zerop (TREE_OPERAND (exp, 2))
8744 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8748 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8753 if (modifier == EXPAND_STACK_PARM)
8755 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8756 if (GET_MODE (op0) == mode)
8760 target = gen_reg_rtx (mode);
8761 convert_move (target, op0, unsignedp);
8765 /* Check for X ? A + B : A. If we have this, we can copy A to the
8766 output and conditionally add B. Similarly for unary operations.
8767 Don't do this if X has side-effects because those side effects
8768 might affect A or B and the "?" operation is a sequence point in
8769 ANSI. (operand_equal_p tests for side effects.) */
8771 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8772 && operand_equal_p (TREE_OPERAND (exp, 2),
8773 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8774 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8775 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8776 && operand_equal_p (TREE_OPERAND (exp, 1),
8777 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8778 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8779 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8780 && operand_equal_p (TREE_OPERAND (exp, 2),
8781 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8782 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8783 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8784 && operand_equal_p (TREE_OPERAND (exp, 1),
8785 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8786 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8788 /* If we are not to produce a result, we have no target. Otherwise,
8789 if a target was specified use it; it will not be used as an
8790 intermediate target unless it is safe. If no target, use a temporary. */
8795 else if (modifier == EXPAND_STACK_PARM)
8796 temp = assign_temp (type, 0, 0, 1);
8797 else if (original_target
8798 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8799 || (singleton && GET_CODE (original_target) == REG
8800 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8801 && original_target == var_rtx (singleton)))
8802 && GET_MODE (original_target) == mode
8803 #ifdef HAVE_conditional_move
8804 && (! can_conditionally_move_p (mode)
8805 || GET_CODE (original_target) == REG
8806 || TREE_ADDRESSABLE (type))
8808 && (GET_CODE (original_target) != MEM
8809 || TREE_ADDRESSABLE (type)))
8810 temp = original_target;
8811 else if (TREE_ADDRESSABLE (type))
8814 temp = assign_temp (type, 0, 0, 1);
8816 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8817 do the test of X as a store-flag operation, do this as
8818 A + ((X != 0) << log C). Similarly for other simple binary
8819 operators. Only do for C == 1 if BRANCH_COST is low. */
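/* Worked example: "i ? a + 4 : a" with BRANCH_COST >= 3 becomes,
   roughly,
     t = (i != 0);    store-flag
     t = t << 2;      log2 of C == 4
     result = a + t;
   replacing a conditional branch with straight-line code.  */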
8820 if (temp && singleton && binary_op
8821 && (TREE_CODE (binary_op) == PLUS_EXPR
8822 || TREE_CODE (binary_op) == MINUS_EXPR
8823 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8824 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8825 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8826 : integer_onep (TREE_OPERAND (binary_op, 1)))
8827 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8831 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8832 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8833 ? addv_optab : add_optab)
8834 : TREE_CODE (binary_op) == MINUS_EXPR
8835 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8836 ? subv_optab : sub_optab)
8837 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8840 /* If we had X ? A : A + 1, do this as A + (X == 0). */
8841 if (singleton == TREE_OPERAND (exp, 1))
8842 cond = invert_truthvalue (TREE_OPERAND (exp, 0));
8844 cond = TREE_OPERAND (exp, 0);
8846 result = do_store_flag (cond, (safe_from_p (temp, singleton, 1)
8848 mode, BRANCH_COST <= 1);
8850 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8851 result = expand_shift (LSHIFT_EXPR, mode, result,
8852 build_int_2 (tree_log2
8856 (safe_from_p (temp, singleton, 1)
8857 ? temp : NULL_RTX), 0);
8861 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8862 return expand_binop (mode, boptab, op1, result, temp,
8863 unsignedp, OPTAB_LIB_WIDEN);
8867 do_pending_stack_adjust ();
8869 op0 = gen_label_rtx ();
8871 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8875 /* If the target conflicts with the other operand of the
8876 binary op, we can't use it. Also, we can't use the target
8877 if it is a hard register, because evaluating the condition
8878 might clobber it. */
8880 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8881 || (GET_CODE (temp) == REG
8882 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8883 temp = gen_reg_rtx (mode);
8884 store_expr (singleton, temp,
8885 modifier == EXPAND_STACK_PARM ? 2 : 0);
8888 expand_expr (singleton,
8889 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8890 if (singleton == TREE_OPERAND (exp, 1))
8891 jumpif (TREE_OPERAND (exp, 0), op0);
8893 jumpifnot (TREE_OPERAND (exp, 0), op0);
8895 start_cleanup_deferral ();
8896 if (binary_op && temp == 0)
8897 /* Just touch the other operand. */
8898 expand_expr (TREE_OPERAND (binary_op, 1),
8899 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8901 store_expr (build (TREE_CODE (binary_op), type,
8902 make_tree (type, temp),
8903 TREE_OPERAND (binary_op, 1)),
8904 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8906 store_expr (build1 (TREE_CODE (unary_op), type,
8907 make_tree (type, temp)),
8908 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8911 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8912 comparison operator. If we have one of these cases, set the
8913 output to A, branch on A (cse will merge these two references),
8914 then set the output to FOO. */
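/* E.g. "x != 0 ? x : y": store X into the output, branch on the same
   X (cse merges the two references), and store Y only on the
   fall-through path.  */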
8916 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8917 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8918 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8919 TREE_OPERAND (exp, 1), 0)
8920 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8921 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8922 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8924 if (GET_CODE (temp) == REG
8925 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8926 temp = gen_reg_rtx (mode);
8927 store_expr (TREE_OPERAND (exp, 1), temp,
8928 modifier == EXPAND_STACK_PARM ? 2 : 0);
8929 jumpif (TREE_OPERAND (exp, 0), op0);
8931 start_cleanup_deferral ();
8932 store_expr (TREE_OPERAND (exp, 2), temp,
8933 modifier == EXPAND_STACK_PARM ? 2 : 0);
8937 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8938 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8939 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8940 TREE_OPERAND (exp, 2), 0)
8941 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8942 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8943 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8945 if (GET_CODE (temp) == REG
8946 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8947 temp = gen_reg_rtx (mode);
8948 store_expr (TREE_OPERAND (exp, 2), temp,
8949 modifier == EXPAND_STACK_PARM ? 2 : 0);
8950 jumpifnot (TREE_OPERAND (exp, 0), op0);
8952 start_cleanup_deferral ();
8953 store_expr (TREE_OPERAND (exp, 1), temp,
8954 modifier == EXPAND_STACK_PARM ? 2 : 0);
8959 op1 = gen_label_rtx ();
8960 jumpifnot (TREE_OPERAND (exp, 0), op0);
8962 start_cleanup_deferral ();
8964 /* One branch of the cond can be void, if it never returns. For
8965 example, A ? throw : E. */
8967 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8968 store_expr (TREE_OPERAND (exp, 1), temp,
8969 modifier == EXPAND_STACK_PARM ? 2 : 0);
8971 expand_expr (TREE_OPERAND (exp, 1),
8972 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8973 end_cleanup_deferral ();
8975 emit_jump_insn (gen_jump (op1));
8978 start_cleanup_deferral ();
8980 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8981 store_expr (TREE_OPERAND (exp, 2), temp,
8982 modifier == EXPAND_STACK_PARM ? 2 : 0);
8984 expand_expr (TREE_OPERAND (exp, 2),
8985 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8988 end_cleanup_deferral ();
8999 /* Something needs to be initialized, but we didn't know
9000 where that thing was when building the tree. For example,
9001 it could be the return value of a function, or a parameter
9002 to a function which is laid out in the stack, or a temporary
9003 variable which must be passed by reference.
9005 We guarantee that the expression will either be constructed
9006 or copied into our original target. */
9008 tree slot = TREE_OPERAND (exp, 0);
9009 tree cleanups = NULL_TREE;
9012 if (TREE_CODE (slot) != VAR_DECL)
9016 target = original_target;
9018 /* Set this here so that if we get a target that refers to a
9019 register variable that's already been used, put_reg_into_stack
9020 knows that it should fix up those uses. */
9021 TREE_USED (slot) = 1;
9025 if (DECL_RTL_SET_P (slot))
9027 target = DECL_RTL (slot);
9028 /* If we have already expanded the slot, don't do anything else. */
9030 if (TREE_OPERAND (exp, 1) == NULL_TREE)
9035 target = assign_temp (type, 2, 0, 1);
9036 /* All temp slots at this level must not conflict. */
9037 preserve_temp_slots (target);
9038 SET_DECL_RTL (slot, target);
9039 if (TREE_ADDRESSABLE (slot))
9040 put_var_into_stack (slot, /*rescan=*/false);
9042 /* Since SLOT is not known to the called function
9043 to belong to its stack frame, we must build an explicit
9044 cleanup. This case occurs when we must build up a reference
9045 to pass the reference as an argument. In this case,
9046 it is very likely that such a reference need not be built here. */
9049 if (TREE_OPERAND (exp, 2) == 0)
9050 TREE_OPERAND (exp, 2)
9051 = (*lang_hooks.maybe_build_cleanup) (slot);
9052 cleanups = TREE_OPERAND (exp, 2);
9057 /* This case does occur when expanding a parameter which
9058 needs to be constructed on the stack. The target
9059 is the actual stack address that we want to initialize.
9060 The function we call will perform the cleanup in this case. */
9062 /* If we have already assigned it space, use that space,
9063 not the target that we were passed in, as our target
9064 parameter is only a hint. */
9065 if (DECL_RTL_SET_P (slot))
9067 target = DECL_RTL (slot);
9068 /* If we have already expanded the slot, don't do anything else. */
9070 if (TREE_OPERAND (exp, 1) == NULL_TREE)
9075 SET_DECL_RTL (slot, target);
9076 /* If we must have an addressable slot, then make sure that
9077 the RTL that we just stored in slot is OK. */
9078 if (TREE_ADDRESSABLE (slot))
9079 put_var_into_stack (slot, /*rescan=*/true);
9083 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
9084 /* Mark it as expanded. */
9085 TREE_OPERAND (exp, 1) = NULL_TREE;
9087 store_expr (exp1, target, modifier == EXPAND_STACK_PARM ? 2 : 0);
9089 expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));
9096 tree lhs = TREE_OPERAND (exp, 0);
9097 tree rhs = TREE_OPERAND (exp, 1);
9099 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
9105 /* If lhs is complex, expand calls in rhs before computing it.
9106 That's so we don't compute a pointer and save it over a
9107 call. If lhs is simple, compute it first so we can give it
9108 as a target if the rhs is just a call. This avoids an
9109 extra temp and copy and that prevents a partial-subsumption
9110 which makes bad code. Actually we could treat
9111 component_ref's of vars like vars. */
9113 tree lhs = TREE_OPERAND (exp, 0);
9114 tree rhs = TREE_OPERAND (exp, 1);
9118 /* Check for |= or &= of a bitfield of size one into another bitfield
9119 of size 1. In this case, (unless we need the result of the
9120 assignment) we can do this more efficiently with a
9121 test followed by an assignment, if necessary.
9123 ??? At this point, we can't get a BIT_FIELD_REF here. But if
9124 things change so we do, this code should be enhanced to support it. */
9127 && TREE_CODE (lhs) == COMPONENT_REF
9128 && (TREE_CODE (rhs) == BIT_IOR_EXPR
9129 || TREE_CODE (rhs) == BIT_AND_EXPR)
9130 && TREE_OPERAND (rhs, 0) == lhs
9131 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
9132 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
9133 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
9135 rtx label = gen_label_rtx ();
9137 do_jump (TREE_OPERAND (rhs, 1),
9138 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
9139 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
9140 expand_assignment (lhs, convert (TREE_TYPE (rhs),
9141 (TREE_CODE (rhs) == BIT_IOR_EXPR
9143 : integer_zero_node)),
9145 do_pending_stack_adjust ();
9150 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
9156 if (!TREE_OPERAND (exp, 0))
9157 expand_null_return ();
9159 expand_return (TREE_OPERAND (exp, 0));
9162 case PREINCREMENT_EXPR:
9163 case PREDECREMENT_EXPR:
9164 return expand_increment (exp, 0, ignore);
9166 case POSTINCREMENT_EXPR:
9167 case POSTDECREMENT_EXPR:
9168 /* Faster to treat as pre-increment if result is not used. */
9169 return expand_increment (exp, ! ignore, ignore);
9172 if (modifier == EXPAND_STACK_PARM)
9174 /* Are we taking the address of a nested function? */
9175 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
9176 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
9177 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
9178 && ! TREE_STATIC (exp))
9180 op0 = trampoline_address (TREE_OPERAND (exp, 0));
9181 op0 = force_operand (op0, target);
9183 /* If we are taking the address of something erroneous, just use zero. */
9185 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
9187 /* If we are taking the address of a constant and are at the
9188 top level, we have to use output_constant_def since we can't
9189 call force_const_mem at top level. */
9191 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
9192 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
9194 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
9197 /* We make sure to pass const0_rtx down if we came in with
9198 ignore set, to avoid doing the cleanups twice for something. */
9199 op0 = expand_expr (TREE_OPERAND (exp, 0),
9200 ignore ? const0_rtx : NULL_RTX, VOIDmode,
9201 (modifier == EXPAND_INITIALIZER
9202 ? modifier : EXPAND_CONST_ADDRESS));
9204 /* If we are going to ignore the result, OP0 will have been set
9205 to const0_rtx, so just return it. Don't get confused and
9206 think we are taking the address of the constant. */
9210 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
9211 clever and return a REG when given a MEM. */
9212 op0 = protect_from_queue (op0, 1);
9214 /* We would like the object in memory. If it is a constant, we can
9215 have it be statically allocated into memory. For a non-constant,
9216 we need to allocate some memory and store the value into it. */
9218 if (CONSTANT_P (op0))
9219 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
9221 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
9222 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
9223 || GET_CODE (op0) == PARALLEL)
9225 /* If the operand is a SAVE_EXPR, we can deal with this by
9226 forcing the SAVE_EXPR into memory. */
9227 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
9229 put_var_into_stack (TREE_OPERAND (exp, 0),
9231 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
9235 /* If this object is in a register, it can't be BLKmode. */
9236 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9237 rtx memloc = assign_temp (inner_type, 1, 1, 1);
9239 if (GET_CODE (op0) == PARALLEL)
9240 /* Handle calls that pass values in multiple
9241 non-contiguous locations. The Irix 6 ABI has examples of this. */
9243 emit_group_store (memloc, op0,
9244 int_size_in_bytes (inner_type));
9246 emit_move_insn (memloc, op0);
9252 if (GET_CODE (op0) != MEM)
9255 mark_temp_addr_taken (op0);
9256 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
9258 op0 = XEXP (op0, 0);
9259 #ifdef POINTERS_EXTEND_UNSIGNED
9260 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
9261 && mode == ptr_mode)
9262 op0 = convert_memory_address (ptr_mode, op0);
9267 /* If OP0 is not aligned at least as much as the type requires, we
9268 need to make a temporary, copy OP0 to it, and take the address of
9269 the temporary. We want to use the alignment of the type, not of
9270 the operand. Note that this is incorrect for FUNCTION_TYPE, but
9271 the test for BLKmode means that can't happen. The test for
9272 BLKmode is because we never make mis-aligned MEMs with non-BLKmode.
9275 We don't need to do this at all if the machine doesn't have
9276 strict alignment. */
9277 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
9278 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
9280 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
9282 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9285 if (TYPE_ALIGN_OK (inner_type))
9288 if (TREE_ADDRESSABLE (inner_type))
9290 /* We can't make a bitwise copy of this object, so fail. */
9291 error ("cannot take the address of an unaligned member");
9295 new = assign_stack_temp_for_type
9296 (TYPE_MODE (inner_type),
9297 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
9298 : int_size_in_bytes (inner_type),
9299 1, build_qualified_type (inner_type,
9300 (TYPE_QUALS (inner_type)
9301 | TYPE_QUAL_CONST)));
9303 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
9304 (modifier == EXPAND_STACK_PARM
9305 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
9310 op0 = force_operand (XEXP (op0, 0), target);
9314 && GET_CODE (op0) != REG
9315 && modifier != EXPAND_CONST_ADDRESS
9316 && modifier != EXPAND_INITIALIZER
9317 && modifier != EXPAND_SUM)
9318 op0 = force_reg (Pmode, op0);
9320 if (GET_CODE (op0) == REG
9321 && ! REG_USERVAR_P (op0))
9322 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
9324 #ifdef POINTERS_EXTEND_UNSIGNED
9325 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
9326 && mode == ptr_mode)
9327 op0 = convert_memory_address (ptr_mode, op0);
9332 case ENTRY_VALUE_EXPR:
9335 /* COMPLEX type for Extended Pascal & Fortran */
9338 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9341 /* Get the rtx code of the operands. */
9342 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9343 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
9346 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
9350 /* Move the real (op0) and imaginary (op1) parts to their location. */
9351 emit_move_insn (gen_realpart (mode, target), op0);
9352 emit_move_insn (gen_imagpart (mode, target), op1);
9354 insns = get_insns ();
9357 /* Complex construction should appear as a single unit. */
9358 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
9359 each with a separate pseudo as destination.
9360 It's not correct for flow to treat them as a unit. */
9361 if (GET_CODE (target) != CONCAT)
9362 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
9370 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9371 return gen_realpart (mode, op0);
9374 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9375 return gen_imagpart (mode, op0);
9379 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9383 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9386 target = gen_reg_rtx (mode);
9390 /* Store the realpart and the negated imagpart to target. */
9391 emit_move_insn (gen_realpart (partmode, target),
9392 gen_realpart (partmode, op0));
9394 imag_t = gen_imagpart (partmode, target);
9395 temp = expand_unop (partmode,
9396 ! unsignedp && flag_trapv
9397 && (GET_MODE_CLASS(partmode) == MODE_INT)
9398 ? negv_optab : neg_optab,
9399 gen_imagpart (partmode, op0), imag_t, 0);
9401 emit_move_insn (imag_t, temp);
9403 insns = get_insns ();
9406 /* Conjugate should appear as a single unit.
9407 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
9408 each with a separate pseudo as destination.
9409 It's not correct for flow to treat them as a unit. */
9410 if (GET_CODE (target) != CONCAT)
9411 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
9418 case TRY_CATCH_EXPR:
9420 tree handler = TREE_OPERAND (exp, 1);
9422 expand_eh_region_start ();
9424 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9426 expand_eh_region_end_cleanup (handler);
9431 case TRY_FINALLY_EXPR:
9433 tree try_block = TREE_OPERAND (exp, 0);
9434 tree finally_block = TREE_OPERAND (exp, 1);
9436 if (!optimize || unsafe_for_reeval (finally_block) > 1)
9438 /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
9439 is not sufficient, so we cannot expand the block twice.
9440 So we play games with GOTO_SUBROUTINE_EXPR to let us
9441 expand the thing only once. */
9442 /* When not optimizing, we go ahead with this form since
9443 (1) user breakpoints operate more predictably without
9444 code duplication, and
9445 (2) we're not running any of the global optimizers
9446 that would explode in time/space with the highly
9447 connected CFG created by the indirect branching. */
9449 rtx finally_label = gen_label_rtx ();
9450 rtx done_label = gen_label_rtx ();
9451 rtx return_link = gen_reg_rtx (Pmode);
9452 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
9453 (tree) finally_label, (tree) return_link);
9454 TREE_SIDE_EFFECTS (cleanup) = 1;
9456 /* Start a new binding layer that will keep track of all cleanup
9457 actions to be performed. */
9458 expand_start_bindings (2);
9459 target_temp_slot_level = temp_slot_level;
9461 expand_decl_cleanup (NULL_TREE, cleanup);
9462 op0 = expand_expr (try_block, target, tmode, modifier);
9464 preserve_temp_slots (op0);
9465 expand_end_bindings (NULL_TREE, 0, 0);
9466 emit_jump (done_label);
9467 emit_label (finally_label);
9468 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
9469 emit_indirect_jump (return_link);
9470 emit_label (done_label);
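/* A sketch of the single-copy form built above:
     <try body>
     return_link = &&resume;  goto finally;
   resume:
     goto done;
   finally:
     <finally body>
     goto *return_link;
   done:
   so FINALLY_BLOCK is emitted exactly once even though every exit
   path must pass through it.  */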
9474 expand_start_bindings (2);
9475 target_temp_slot_level = temp_slot_level;
9477 expand_decl_cleanup (NULL_TREE, finally_block);
9478 op0 = expand_expr (try_block, target, tmode, modifier);
9480 preserve_temp_slots (op0);
9481 expand_end_bindings (NULL_TREE, 0, 0);
9487 case GOTO_SUBROUTINE_EXPR:
9489 rtx subr = (rtx) TREE_OPERAND (exp, 0);
9490 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
9491 rtx return_address = gen_label_rtx ();
9492 emit_move_insn (return_link,
9493 gen_rtx_LABEL_REF (Pmode, return_address));
9495 emit_label (return_address);
9500 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
9503 return get_exception_pointer (cfun);
9506 /* Function descriptors are not valid except as
9507 initialization constants, and should not be expanded. */
9511 return (*lang_hooks.expand_expr) (exp, original_target, tmode, modifier);
9514 /* Here to do an ordinary binary operator, generating an instruction
9515 from the optab already placed in `this_optab'. */
9517 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
9519 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
9520 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9522 if (modifier == EXPAND_STACK_PARM)
9524 temp = expand_binop (mode, this_optab, op0, op1, target,
9525 unsignedp, OPTAB_LIB_WIDEN);
9531 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9532 when applied to the address of EXP produces an address known to be
9533 aligned more than BIGGEST_ALIGNMENT. */
9536 is_aligning_offset (offset, exp)
9540 /* Strip off any conversions and WITH_RECORD_EXPR nodes. */
9541 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9542 || TREE_CODE (offset) == NOP_EXPR
9543 || TREE_CODE (offset) == CONVERT_EXPR
9544 || TREE_CODE (offset) == WITH_RECORD_EXPR)
9545 offset = TREE_OPERAND (offset, 0);
9547 /* We must now have a BIT_AND_EXPR with a constant that is one less than a
9548 power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9549 if (TREE_CODE (offset) != BIT_AND_EXPR
9550 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9551 || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT) <= 0
9552 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9555 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9556 It must be NEGATE_EXPR. Then strip any more conversions. */
9557 offset = TREE_OPERAND (offset, 0);
9558 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9559 || TREE_CODE (offset) == NOP_EXPR
9560 || TREE_CODE (offset) == CONVERT_EXPR)
9561 offset = TREE_OPERAND (offset, 0);
9563 if (TREE_CODE (offset) != NEGATE_EXPR)
9566 offset = TREE_OPERAND (offset, 0);
9567 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9568 || TREE_CODE (offset) == NOP_EXPR
9569 || TREE_CODE (offset) == CONVERT_EXPR)
9570 offset = TREE_OPERAND (offset, 0);
9572 /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
9573 whose type is the same as EXP. */
9574 return (TREE_CODE (offset) == ADDR_EXPR
9575 && (TREE_OPERAND (offset, 0) == exp
9576 || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
9577 && (TREE_TYPE (TREE_OPERAND (offset, 0))
9578 == TREE_TYPE (exp)))));
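/* This recognizes the classic over-alignment idiom: informally, an
   offset of the form (- (long) &EXP) & (ALIGN - 1), where ALIGN is a
   power of 2 larger than BIGGEST_ALIGNMENT; adding that offset to
   &EXP produces an ALIGN-aligned address.  */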
9581 /* Return the tree node if ARG corresponds to a string constant, or zero
9582 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9583 in bytes within the string that ARG is accessing. The type of the
9584 offset will be `sizetype'. */
9587 string_constant (arg, ptr_offset)
9593 if (TREE_CODE (arg) == ADDR_EXPR
9594 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9596 *ptr_offset = size_zero_node;
9597 return TREE_OPERAND (arg, 0);
9599 else if (TREE_CODE (arg) == PLUS_EXPR)
9601 tree arg0 = TREE_OPERAND (arg, 0);
9602 tree arg1 = TREE_OPERAND (arg, 1);
9607 if (TREE_CODE (arg0) == ADDR_EXPR
9608 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9610 *ptr_offset = convert (sizetype, arg1);
9611 return TREE_OPERAND (arg0, 0);
9613 else if (TREE_CODE (arg1) == ADDR_EXPR
9614 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9616 *ptr_offset = convert (sizetype, arg0);
9617 return TREE_OPERAND (arg1, 0);
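/* E.g. for ARG == &"hello" + 2 (the usual form of &"hello"[2]), this
   returns the STRING_CST "hello" and sets *PTR_OFFSET to
   (sizetype) 2.  */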
9624 /* Expand code for a post- or pre- increment or decrement
9625 and return the RTX for the result.
9626 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9629 expand_increment (exp, post, ignore)
9635 tree incremented = TREE_OPERAND (exp, 0);
9636 optab this_optab = add_optab;
9638 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9639 int op0_is_copy = 0;
9640 int single_insn = 0;
9641 /* 1 means we can't store into OP0 directly,
9642 because it is a subreg narrower than a word,
9643 and we don't dare clobber the rest of the word. */
9646 /* Stabilize any component ref that might need to be
9647 evaluated more than once below. */
9649 || TREE_CODE (incremented) == BIT_FIELD_REF
9650 || (TREE_CODE (incremented) == COMPONENT_REF
9651 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9652 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9653 incremented = stabilize_reference (incremented);
9654 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9655 ones into save exprs so that they don't accidentally get evaluated
9656 more than once by the code below. */
9657 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9658 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9659 incremented = save_expr (incremented);
9661 /* Compute the operands as RTX.
9662 Note whether OP0 is the actual lvalue or a copy of it:
9663 I believe it is a copy iff it is a register or subreg
9664 and insns were generated in computing it. */
9666 temp = get_last_insn ();
9667 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9669 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9670 in place but instead must do sign- or zero-extension during assignment,
9671 so we copy it into a new register and let the code below use it as a copy.
9674 Note that we can safely modify this SUBREG since it is known not to be
9675 shared (it was made by the expand_expr call above). */
9677 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9680 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9684 else if (GET_CODE (op0) == SUBREG
9685 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9687 /* We cannot increment this SUBREG in place. If we are
9688 post-incrementing, get a copy of the old value. Otherwise,
9689 just mark that we cannot increment in place. */
9691 op0 = copy_to_reg (op0);
9696 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9697 && temp != get_last_insn ());
9698 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9700 /* Decide whether incrementing or decrementing. */
9701 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9702 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9703 this_optab = sub_optab;
9705 /* Convert decrement by a constant into a negative increment. */
9706 if (this_optab == sub_optab
9707 && GET_CODE (op1) == CONST_INT)
9709 op1 = GEN_INT (-INTVAL (op1));
9710 this_optab = add_optab;
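/* E.g. "x -= 5" is handled from here on as "x += -5", so the queueing
   code below only deals with additions in the constant case.  */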
9713 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9714 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9716 /* For a preincrement, see if we can do this with a single instruction. */
9719 icode = (int) this_optab->handlers[(int) mode].insn_code;
9720 if (icode != (int) CODE_FOR_nothing
9721 /* Make sure that OP0 is valid for operands 0 and 1
9722 of the insn we want to queue. */
9723 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9724 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9725 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9729 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9730 then we cannot just increment OP0. We must therefore contrive to
9731 increment the original value. Then, for postincrement, we can return
9732 OP0 since it is a copy of the old value. For preincrement, expand here
9733 unless we can do it with a single insn.
9735 Likewise if storing directly into OP0 would clobber high bits
9736 we need to preserve (bad_subreg). */
9737 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9739 /* This is the easiest way to increment the value wherever it is.
9740 Problems with multiple evaluation of INCREMENTED are prevented
9741 because either (1) it is a component_ref or preincrement,
9742 in which case it was stabilized above, or (2) it is an array_ref
9743 with constant index in an array in a register, which is
9744 safe to reevaluate. */
9745 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9746 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9747 ? MINUS_EXPR : PLUS_EXPR),
9750 TREE_OPERAND (exp, 1));
9752 while (TREE_CODE (incremented) == NOP_EXPR
9753 || TREE_CODE (incremented) == CONVERT_EXPR)
9755 newexp = convert (TREE_TYPE (incremented), newexp);
9756 incremented = TREE_OPERAND (incremented, 0);
9759 temp = expand_assignment (incremented, newexp, ! post && ! ignore , 0);
9760 return post ? op0 : temp;
9765 /* We have a true reference to the value in OP0.
9766 If there is an insn to add or subtract in this mode, queue it.
9767 Queueing the increment insn avoids the register shuffling
9768 that often results if we must increment now and first save
9769 the old value for subsequent use. */
9771 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9772 op0 = stabilize (op0);
9775 icode = (int) this_optab->handlers[(int) mode].insn_code;
9776 if (icode != (int) CODE_FOR_nothing
9777 /* Make sure that OP0 is valid for operands 0 and 1
9778 of the insn we want to queue. */
9779 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9780 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9782 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9783 op1 = force_reg (mode, op1);
9785 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9787 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9789 rtx addr = (general_operand (XEXP (op0, 0), mode)
9790 ? force_reg (Pmode, XEXP (op0, 0))
9791 : copy_to_reg (XEXP (op0, 0)));
9794 op0 = replace_equiv_address (op0, addr);
9795 temp = force_reg (GET_MODE (op0), op0);
9796 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9797 op1 = force_reg (mode, op1);
9799 /* The increment queue is LIFO, thus we have to `queue'
9800 the instructions in reverse order. */
9801 enqueue_insn (op0, gen_move_insn (op0, temp));
9802 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
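/* Being LIFO, the queue flushes the two insns above in reverse, i.e.
     temp = temp + op1;   (the increment)
     op0  = temp;         (the store back to memory)
   which is the order actually required.  */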
9807 /* Preincrement, or we can't increment with one simple insn. */
9809 /* Save a copy of the value before inc or dec, to return it later. */
9810 temp = value = copy_to_reg (op0);
9812 /* Arrange to return the incremented value. */
9813 /* Copy the rtx because expand_binop will protect from the queue,
9814 and the results of that would be invalid for us to return
9815 if our caller does emit_queue before using our result. */
9816 temp = copy_rtx (value = op0);
9818 /* Increment however we can. */
9819 op1 = expand_binop (mode, this_optab, value, op1, op0,
9820 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9822 /* Make sure the value is stored into OP0. */
9824 emit_move_insn (op0, op1);
9829 /* Generate code to calculate EXP using a store-flag instruction
9830 and return an rtx for the result. EXP is either a comparison
9831 or a TRUTH_NOT_EXPR whose operand is a comparison.
9833 If TARGET is nonzero, store the result there if convenient.
9835 If ONLY_CHEAP is nonzero, only do this if it is likely to be very cheap.
9838 Return zero if there is no suitable set-flag instruction
9839 available on this machine.
9841 Once expand_expr has been called on the arguments of the comparison,
9842 we are committed to doing the store flag, since it is not safe to
9843 re-evaluate the expression. We emit the store-flag insn by calling
9844 emit_store_flag, but only expand the arguments if we have a reason
9845 to believe that emit_store_flag will be successful. If we think that
9846 it will, but it isn't, we have to simulate the store-flag with a
9847 set/jump/set sequence. */
9850 do_store_flag (exp, target, mode, only_cheap)
9853 enum machine_mode mode;
9857 tree arg0, arg1, type;
9859 enum machine_mode operand_mode;
9863 enum insn_code icode;
9864 rtx subtarget = target;
9867 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9868 result at the end. We can't simply invert the test since it would
9869 have already been inverted if it were valid. This case occurs for
9870 some floating-point comparisons. */
9872 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9873 invert = 1, exp = TREE_OPERAND (exp, 0);
9875 arg0 = TREE_OPERAND (exp, 0);
9876 arg1 = TREE_OPERAND (exp, 1);
9878 /* Don't crash if the comparison was erroneous. */
9879 if (arg0 == error_mark_node || arg1 == error_mark_node)
9882 type = TREE_TYPE (arg0);
9883 operand_mode = TYPE_MODE (type);
9884 unsignedp = TREE_UNSIGNED (type);
9886 /* We won't bother with BLKmode store-flag operations because it would mean
9887 passing a lot of information to emit_store_flag. */
9888 if (operand_mode == BLKmode)
9891 /* We won't bother with store-flag operations involving function pointers
9892 when function pointers must be canonicalized before comparisons. */
9893 #ifdef HAVE_canonicalize_funcptr_for_compare
9894 if (HAVE_canonicalize_funcptr_for_compare
9895 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9896 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9898 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9899 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9900 == FUNCTION_TYPE))))
9907 /* Get the rtx comparison code to use. We know that EXP is a comparison
9908 operation of some type. Some comparisons against 1 and -1 can be
9909 converted to comparisons with zero. Do so here so that the tests
9910 below will be aware that we have a comparison with zero. These
9911 tests will not catch constants in the first operand, but constants
9912 are rarely passed as the first operand. */
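/* E.g. for signed X, "x < 1" is canonicalized below to "x <= 0" and
   "x >= 1" to "x > 0", so the zero-comparison special cases later on
   apply to these forms as well.  */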
9914 switch (TREE_CODE (exp))
9923 if (integer_onep (arg1))
9924 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9926 code = unsignedp ? LTU : LT;
9929 if (! unsignedp && integer_all_onesp (arg1))
9930 arg1 = integer_zero_node, code = LT;
9932 code = unsignedp ? LEU : LE;
9935 if (! unsignedp && integer_all_onesp (arg1))
9936 arg1 = integer_zero_node, code = GE;
9938 code = unsignedp ? GTU : GT;
9941 if (integer_onep (arg1))
9942 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9944 code = unsignedp ? GEU : GE;
9947 case UNORDERED_EXPR:
9973 /* Put a constant second. */
9974 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9976 tem = arg0; arg0 = arg1; arg1 = tem;
9977 code = swap_condition (code);
9980 /* If this is an equality or inequality test of a single bit, we can
9981 do this by shifting the bit being tested to the low-order bit and
9982 masking the result with the constant 1. If the condition was EQ,
9983 we xor it with 1. This does not require an scc insn and is faster
9984 than an scc insn even if we have it. */
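/* E.g. "(x & 8) != 0" becomes "(x >> 3) & 1", and the EQ form adds an
   XOR with 1 -- at most three cheap insns, no scc needed.  */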
9986 if ((code == NE || code == EQ)
9987 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9988 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9990 tree inner = TREE_OPERAND (arg0, 0);
9991 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
9994 /* If INNER is a right shift of a constant and it plus BITNUM does
9995 not overflow, adjust BITNUM and INNER. */
9997 if (TREE_CODE (inner) == RSHIFT_EXPR
9998 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
9999 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10000 && bitnum < TYPE_PRECISION (type)
10001 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10002 bitnum - TYPE_PRECISION (type)))
10004 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10005 inner = TREE_OPERAND (inner, 0);
10008 /* If we are going to be able to omit the AND below, we must do our
10009 operations as unsigned. If we must use the AND, we have a choice.
10010 Normally unsigned is faster, but for some machines signed is. */
10011 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10012 #ifdef LOAD_EXTEND_OP
10013 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10019 if (! get_subtarget (subtarget)
10020 || GET_MODE (subtarget) != operand_mode
10021 || ! safe_from_p (subtarget, inner, 1))
10024 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10027 op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
10028 size_int (bitnum), subtarget, ops_unsignedp);
10030 if (GET_MODE (op0) != mode)
10031 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10033 if ((code == EQ && ! invert) || (code == NE && invert))
10034 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10035 ops_unsignedp, OPTAB_LIB_WIDEN);
10037 /* Put the AND last so it can combine with more things. */
10038 if (bitnum != TYPE_PRECISION (type) - 1)
10039 op0 = expand_and (mode, op0, const1_rtx, subtarget);
10044 /* Now see if we are likely to be able to do this. Return if not. */
10045 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10048 icode = setcc_gen_code[(int) code];
10049 if (icode == CODE_FOR_nothing
10050 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10052 /* We can only do this if it is one of the special cases that
10053 can be handled without an scc insn. */
10054 if ((code == LT && integer_zerop (arg1))
10055 || (! only_cheap && code == GE && integer_zerop (arg1)))
10057 else if (BRANCH_COST >= 0
10058 && ! only_cheap && (code == NE || code == EQ)
10059 && TREE_CODE (type) != REAL_TYPE
10060 && ((abs_optab->handlers[(int) operand_mode].insn_code
10061 != CODE_FOR_nothing)
10062 || (ffs_optab->handlers[(int) operand_mode].insn_code
10063 != CODE_FOR_nothing)))
10069 if (! get_subtarget (target)
10070 || GET_MODE (subtarget) != operand_mode
10071 || ! safe_from_p (subtarget, arg1, 1))
10074 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10075 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10078 target = gen_reg_rtx (mode);
10080 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10081 because, if the emit_store_flag does anything it will succeed and
10082 OP0 and OP1 will not be used subsequently. */
10084 result = emit_store_flag (target, code,
10085 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10086 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10087 operand_mode, unsignedp, 1);
10092 result = expand_binop (mode, xor_optab, result, const1_rtx,
10093 result, 0, OPTAB_LIB_WIDEN);
10097 /* If this failed, we have to do this with set/compare/jump/set code. */
10098 if (GET_CODE (target) != REG
10099 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10100 target = gen_reg_rtx (GET_MODE (target));
10102 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10103 result = compare_from_rtx (op0, op1, code, unsignedp,
10104 operand_mode, NULL_RTX);
10105 if (GET_CODE (result) == CONST_INT)
10106 return (((result == const0_rtx && ! invert)
10107 || (result != const0_rtx && invert))
10108 ? const0_rtx : const1_rtx);
10110 /* The code of RESULT may not match CODE if compare_from_rtx
10111 decided to swap its operands and reverse the original code.
10113 We know that compare_from_rtx returns either a CONST_INT or
10114 a new comparison code, so it is safe to just extract the
10115 code from RESULT. */
10116 code = GET_CODE (result);
10118 label = gen_label_rtx ();
10119 if (bcc_gen_fctn[(int) code] == 0)
10122 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10123 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10124 emit_label (label);
10130 /* Stubs in case we haven't got a casesi insn. */
10131 #ifndef HAVE_casesi
10132 # define HAVE_casesi 0
10133 # define gen_casesi(a, b, c, d, e) (0)
10134 # define CODE_FOR_casesi CODE_FOR_nothing
10137 /* If the machine does not have a case insn that compares the bounds,
10138 this means extra overhead for dispatch tables, which raises the
10139 threshold for using them. */
10140 #ifndef CASE_VALUES_THRESHOLD
10141 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
10142 #endif /* CASE_VALUES_THRESHOLD */
10145 case_values_threshold ()
10147 return CASE_VALUES_THRESHOLD;
/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */

int
try_casesi (index_type, index_expr, minval, range,
	    table_label, default_label)
     tree index_type, index_expr, minval, range;
     rtx table_label ATTRIBUTE_UNUSED;
     rtx default_label;
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build (MINUS_EXPR, index_type,
			  index_expr, minval);
      minval = integer_zero_node;
      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
			       omode, 1, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
	{
	  index_expr = convert ((*lang_hooks.types.type_for_size)
				(index_bits, 0), index_expr);
	  index_type = TREE_TYPE (index_expr);
	}

      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
    }
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);

  op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
		       op1, TREE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
		       op2, TREE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
			      table_label, default_label));
  return 1;
}
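
/* For illustration (not part of GCC): a target's casesi pattern is
   expected to behave like this C sketch, treating INDEX values outside
   [OP1, OP1 + OP2] as out of range.  The computed-goto syntax and the
   TABLE array are a hypothetical GNU C approximation of the dispatch.  */
#if 0
  unsigned HOST_WIDE_INT i = (unsigned HOST_WIDE_INT) (index - op1);
  if (i > (unsigned HOST_WIDE_INT) op2)
    goto *default_label;
  goto *table[i];		/* jump through the dispatch table */
#endif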
/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif
/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

static void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  rtx temp, vector;

  if (INTVAL (range) > cfun->max_jumptable_ents)
    cfun->max_jumptable_ents = INTVAL (range);

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
			   default_label);
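
  /* Illustration (not GCC code): because the lower bound has already
     been subtracted, one unsigned comparison checks both ends of the
     range; values below the old minimum wrap around to very large
     unsigned numbers.  */
#if 0
  int
  in_range_sketch (int i, int minval, int maxval)
  {
    /* Equivalent to: i >= minval && i <= maxval.  */
    return (unsigned int) (i - minval) <= (unsigned int) (maxval - minval);
  }
#endif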
  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then the INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
			gen_rtx_MULT (Pmode, index,
				      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
			gen_rtx_LABEL_REF (Pmode, table_label));
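
  /* Worked example (illustrative): if CASE_VECTOR_MODE is SImode, each
     table entry is 4 bytes, so entry I is loaded from the address
     table_label + 4*I; the rtx built above is then
     (plus (mult index (const_int 4)) (label_ref table_label)).  */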
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  MEM_NOTRAP_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
/* Attempt to generate a tablejump instruction; return 1 if successful.  */

int
try_tablejump (index_type, index_expr, minval, range,
	       table_label, default_label)
     tree index_type, index_expr, minval, range;
     rtx table_label, default_label;
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold (build (MINUS_EXPR, index_type,
			    convert (index_type, index_expr),
			    convert (index_type, minval)));
  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
		convert_modes (TYPE_MODE (index_type),
			       TYPE_MODE (TREE_TYPE (range)),
			       expand_expr (range, NULL_RTX,
					    VOIDmode, 0),
			       TREE_UNSIGNED (TREE_TYPE (range))),
		table_label, default_label);
  return 1;
}
/* Nonzero if the mode is a valid vector mode for this architecture.
   This returns nonzero even if there is no hardware support for the
   vector mode, but we can emulate with narrower modes.  */

int
vector_mode_valid_p (mode)
     enum machine_mode mode;
{
  enum mode_class class = GET_MODE_CLASS (mode);
  enum machine_mode innermode;

  /* Doh!  What's going on?  */
  if (class != MODE_VECTOR_INT
      && class != MODE_VECTOR_FLOAT)
    return 0;

  /* Hardware support.  Woo hoo!  */
  if (VECTOR_MODE_SUPPORTED_P (mode))
    return 1;

  innermode = GET_MODE_INNER (mode);

  /* We should probably return 1 if requesting V4DI and we have no DI,
     but we have V2DI, but this is probably very unlikely.  */

  /* If we have support for the inner mode, we can safely emulate it.
     We may not have V2DI, but we can emulate with a pair of DIs.  */
  return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
}
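
/* Illustration (hypothetical, not GCC code): "emulate with narrower
   modes" means e.g. a V2DI move can be split into two DImode moves,
   roughly as this C sketch does with 64-bit halves.  */
#if 0
typedef struct { long long lo, hi; } v2di_sketch;

static void
move_v2di_sketch (v2di_sketch *dst, const v2di_sketch *src)
{
  dst->lo = src->lo;	/* first DImode move */
  dst->hi = src->hi;	/* second DImode move */
}
#endif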
/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */

static rtx
const_vector_from_tree (exp)
     tree exp;
{
  rtvec v;
  int units, i;
  tree link, elt;
  enum machine_mode inner, mode;

  mode = TYPE_MODE (TREE_TYPE (exp));

  if (is_zeros_p (exp))
    return CONST0_RTX (mode);

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  v = rtvec_alloc (units);

  link = TREE_VECTOR_CST_ELTS (exp);
  for (i = 0; link; link = TREE_CHAIN (link), ++i)
    {
      elt = TREE_VALUE (link);

      if (TREE_CODE (elt) == REAL_CST)
	RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
							 inner);
      else
	RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
					       TREE_INT_CST_HIGH (elt),
					       inner);
    }

  return gen_rtx_raw_CONST_VECTOR (mode, v);
}
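
/* Example (illustrative): for a V4SI VECTOR_CST such as {1, 2, 3, 4},
   the function above yields (const_vector:V4SI [(const_int 1)
   (const_int 2) (const_int 3) (const_int 4)]); an all-zeros constant
   is returned directly as CONST0_RTX (V4SImode).  */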
#include "gt-expr.h"