/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "typeclass.h"
#include "langhooks.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif

/* Convert defined/undefined to boolean.  */
#ifdef TARGET_MEM_FUNCTIONS
#undef TARGET_MEM_FUNCTIONS
#define TARGET_MEM_FUNCTIONS 1
#else
#define TARGET_MEM_FUNCTIONS 0
#endif
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */

/* Chain of pending expressions for PLACEHOLDER_EXPR to replace.  */
tree placeholder_list = 0;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces
{
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
};
static rtx enqueue_insn		PARAMS ((rtx, rtx));
static unsigned HOST_WIDE_INT move_by_pieces_ninsns
				PARAMS ((unsigned HOST_WIDE_INT,
					 unsigned int));
static void move_by_pieces_1	PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
					 struct move_by_pieces *));
static bool block_move_libcall_safe_for_call_parm PARAMS ((void));
static bool emit_block_move_via_movstr PARAMS ((rtx, rtx, rtx, unsigned));
static rtx emit_block_move_via_libcall PARAMS ((rtx, rtx, rtx));
static tree emit_block_move_libcall_fn PARAMS ((int));
static void emit_block_move_via_loop PARAMS ((rtx, rtx, rtx, unsigned));
static rtx clear_by_pieces_1	PARAMS ((PTR, HOST_WIDE_INT,
					 enum machine_mode));
static void clear_by_pieces	PARAMS ((rtx, unsigned HOST_WIDE_INT,
					 unsigned int));
static void store_by_pieces_1	PARAMS ((struct store_by_pieces *,
					 unsigned int));
static void store_by_pieces_2	PARAMS ((rtx (*) (rtx, ...),
					 enum machine_mode,
					 struct store_by_pieces *));
static bool clear_storage_via_clrstr PARAMS ((rtx, rtx, unsigned));
static rtx clear_storage_via_libcall PARAMS ((rtx, rtx));
static tree clear_storage_libcall_fn PARAMS ((int));
static rtx compress_float_constant PARAMS ((rtx, rtx));
static rtx get_subtarget	PARAMS ((rtx));
static int is_zeros_p		PARAMS ((tree));
static int mostly_zeros_p	PARAMS ((tree));
static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
					     HOST_WIDE_INT, enum machine_mode,
					     tree, tree, int, int));
static void store_constructor	PARAMS ((tree, rtx, int, HOST_WIDE_INT));
static rtx store_field		PARAMS ((rtx, HOST_WIDE_INT,
					 HOST_WIDE_INT, enum machine_mode,
					 tree, enum machine_mode, int, tree,
					 int));
static rtx var_rtx		PARAMS ((tree));
static unsigned HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
static unsigned HOST_WIDE_INT highest_pow2_factor_for_type PARAMS ((tree,
								    tree));
static int is_aligning_offset	PARAMS ((tree, tree));
static rtx expand_increment	PARAMS ((tree, int, int));
static rtx do_store_flag	PARAMS ((tree, rtx, enum machine_mode, int));
static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
static void do_tablejump	PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));
static rtx const_vector_from_tree PARAMS ((tree));
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
/* If a memory-to-memory move would take MOVE_RATIO or more simple
   move-instruction sequences, we will do a movstr or libcall instead.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)
#endif
#endif

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
#endif
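
/* A minimal usage sketch (an illustration, not part of the original
   file): emit_block_move below uses this predicate to pick the inline
   expansion when the copy is cheap enough.  Here `x', `y', `size' and
   `align' stand for its operands.

     if (GET_CODE (size) == CONST_INT
	 && MOVE_BY_PIECES_P (INTVAL (size), align))
       move_by_pieces (x, y, INTVAL (size), align);
     else
       ... try a movstr pattern, a libcall, or an explicit loop ...  */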
/* If a clear memory operation would take CLEAR_RATIO or more simple
   move-instruction sequences, we will do a clrstr or libcall instead.  */

#ifndef CLEAR_RATIO
#if defined (HAVE_clrstrqi) || defined (HAVE_clrstrhi) || defined (HAVE_clrstrsi) || defined (HAVE_clrstrdi) || defined (HAVE_clrstrti)
#define CLEAR_RATIO 2
#else
/* If we are optimizing for space, cut down the default clear ratio.  */
#define CLEAR_RATIO (optimize_size ? 3 : 15)
#endif
#endif
/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero, or
   to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
#endif
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */
      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    REGNO (reg) = regno;

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }
  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
	   srcmode = GET_MODE_WIDER_MODE (srcmode))
	{
	  enum insn_code ic;

	  ic = can_extend_p (mode, srcmode, 0);
	  if (ic == CODE_FOR_nothing)
	    continue;

	  PUT_MODE (mem, srcmode);

	  if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
	    float_extend_from_mem[mode][srcmode] = true;
	}
    }
}
/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  cfun->expr = (struct expr_status *) ggc_alloc (sizeof (struct expr_status));

  pending_stack_adjust = 0;
  stack_pointer_delta = 0;
  inhibit_defer_pop = 0;
  apply_args_value = 0;
}

/* Small sanity check that the queue is empty at the end of a function.  */

void
finish_expr_for_function ()
{
  if (pending_chain)
    abort ();
}
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
				  body, pending_chain);
  return pending_chain;
}
/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     rtx x;
     int modify;
{
  RTX_CODE code = GET_CODE (x);

#if 0 /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  rtx y = XEXP (x, 0);
	  rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));

	  if (QUEUED_INSN (y))
	    {
	      rtx temp = gen_reg_rtx (GET_MODE (x));

	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }

	  /* Copy the address into a pseudo, so that the returned value
	     remains correct across calls to emit_queue.  */
	  return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
	}

      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }

  /* If the increment has not happened, use the variable itself.  Copy it
     into a new pseudo so that the value remains correct across calls to
     emit_queue.  */
  if (QUEUED_INSN (x) == 0)
    return copy_to_reg (QUEUED_VAR (x));
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
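
#if 0
/* Illustrative sketch, not part of the original file: the calling
   discipline the comments above describe.  An expander protects every
   operand that might be QUEUED immediately before use, and flushes the
   queue only after the consuming insn is emitted.  `target' and
   `operand' are hypothetical rtxs.  */
{
  rtx dst = protect_from_queue (target, 1);	/* write access */
  rtx op0 = protect_from_queue (operand, 0);	/* read access */
  emit_move_insn (dst, op0);
  emit_queue ();	/* now perform the pending increments */
}
#endif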
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

int
queued_subexp_p (x)
     rtx x;
{
  enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
	      || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}
/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  rtx p;
  while ((p = pending_chain))
    {
      rtx body = QUEUED_BODY (p);

      switch (GET_CODE (body))
	{
	case INSN:
	case JUMP_INSN:
	case CALL_INSN:
	  QUEUED_INSN (p) = body;
	  emit_insn (body);
	  break;

#ifdef ENABLE_CHECKING
	case SEQUENCE:
	  abort ();
	  break;
#endif

	default:
	  QUEUED_INSN (p) = emit_insn (body);
	  break;
	}

      pending_chain = QUEUED_NEXT (p);
    }
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;
  rtx value, insns;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
			      : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
	abort ();

      if (VECTOR_MODE_P (to_mode))
	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }
  if (to_real != from_real)
    abort ();

  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
    {
      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, 0))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, UNKNOWN);
	  return;
	}
    }

#ifdef HAVE_trunchfqf2
  if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
    {
      emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
      return;
    }
#endif
#ifdef HAVE_trunctqfqf2
  if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
    {
      emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
      return;
    }
#endif
#ifdef HAVE_truncsfqf2
  if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
    {
      emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
      return;
    }
#endif
#ifdef HAVE_truncdfqf2
  if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
    {
      emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
      return;
    }
#endif
#ifdef HAVE_truncxfqf2
  if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
    {
      emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
      return;
    }
#endif
#ifdef HAVE_trunctfqf2
  if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
    {
      emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
      return;
    }
#endif

#ifdef HAVE_trunctqfhf2
  if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
    {
      emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
      return;
    }
#endif
#ifdef HAVE_truncsfhf2
  if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
    {
      emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
      return;
    }
#endif
#ifdef HAVE_truncdfhf2
  if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
    {
      emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
      return;
    }
#endif
#ifdef HAVE_truncxfhf2
  if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
    {
      emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
      return;
    }
#endif
#ifdef HAVE_trunctfhf2
  if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
    {
      emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
      return;
    }
#endif

#ifdef HAVE_truncsftqf2
  if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
    {
      emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
      return;
    }
#endif
#ifdef HAVE_truncdftqf2
  if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
    {
      emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
      return;
    }
#endif
#ifdef HAVE_truncxftqf2
  if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
    {
      emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
      return;
    }
#endif
#ifdef HAVE_trunctftqf2
  if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
    {
      emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
      return;
    }
#endif

#ifdef HAVE_truncdfsf2
  if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
    {
      emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
      return;
    }
#endif
#ifdef HAVE_truncxfsf2
  if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
    {
      emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
      return;
    }
#endif
#ifdef HAVE_trunctfsf2
  if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
    {
      emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
      return;
    }
#endif
#ifdef HAVE_truncxfdf2
  if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
    {
      emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
      return;
    }
#endif
#ifdef HAVE_trunctfdf2
  if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
    {
      emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
      return;
    }
#endif
  libcall = (rtx) 0;
  switch (from_mode)
    {
    case SFmode:
      switch (to_mode)
	{
	case DFmode:
	  libcall = extendsfdf2_libfunc;
	  break;
	case XFmode:
	  libcall = extendsfxf2_libfunc;
	  break;
	case TFmode:
	  libcall = extendsftf2_libfunc;
	  break;
	default:
	  break;
	}
      break;

    case DFmode:
      switch (to_mode)
	{
	case SFmode:
	  libcall = truncdfsf2_libfunc;
	  break;
	case XFmode:
	  libcall = extenddfxf2_libfunc;
	  break;
	case TFmode:
	  libcall = extenddftf2_libfunc;
	  break;
	default:
	  break;
	}
      break;

    case XFmode:
      switch (to_mode)
	{
	case SFmode:
	  libcall = truncxfsf2_libfunc;
	  break;
	case DFmode:
	  libcall = truncxfdf2_libfunc;
	  break;
	default:
	  break;
	}
      break;

    case TFmode:
      switch (to_mode)
	{
	case SFmode:
	  libcall = trunctfsf2_libfunc;
	  break;
	case DFmode:
	  libcall = trunctfdf2_libfunc;
	  break;
	default:
	  break;
	}
      break;

    default:
      break;
    }

  if (libcall == (rtx) 0)
    /* This conversion is not implemented yet.  */
    abort ();

  start_sequence ();
  value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
				   1, from, from_mode);
  insns = get_insns ();
  end_sequence ();
  emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
								from));
  return;
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }
  /* Handle pointer conversion.  */		/* SPEE 900220.  */
  if (to_mode == PQImode)
    {
      if (from_mode != QImode)
	from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_truncqipqi2
      if (HAVE_truncqipqi2)
	{
	  emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncqipqi2 */
    }

  if (from_mode == PQImode)
    {
      if (to_mode != QImode)
	from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_extendpqiqi2
      if (HAVE_extendpqiqi2)
	{
	  emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_extendpqiqi2 */
    }

  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi2 */
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_extendpsisi2
      if (! unsignedp && HAVE_extendpsisi2)
	{
	  emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_extendpsisi2 */
#ifdef HAVE_zero_extendpsisi2
      if (unsignedp && HAVE_zero_extendpsisi2)
	{
	  emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_zero_extendpsisi2 */
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
	from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
	{
	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncdipdi2 */
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
	from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_extendpdidi2
      if (HAVE_extendpdidi2)
	{
	  emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_extendpdidi2 */
    }
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  if (flag_force_mem)
	    from = force_not_mem (from);

	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;
	  rtx tmp;
	  tree shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
				      - GET_MODE_BITSIZE (from_mode), 0);
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }
  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
	{
	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
	{
	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
	{
	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
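
#if 0
/* Illustrative sketch, not part of the original file: widening a
   hypothetical SImode pseudo `src' into a fresh DImode pseudo with
   sign extension (unsignedp == 0).  */
{
  rtx dst = gen_reg_rtx (DImode);
  convert_move (dst, src, 0);
}
#endif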
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
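
#if 0
/* Illustrative sketch, not part of the original file: unlike
   convert_move, convert_to_mode supplies the result rtx itself
   (or reuses part of X in place); here a hypothetical rtx `x'
   is zero-extended to DImode.  */
rtx wide = convert_to_mode (DImode, x, 1);
#endif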
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }
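
  /* For instance, with a 64-bit HOST_WIDE_INT, converting (const_int -1)
     to an unsigned 128-bit integer mode must produce 2**64 - 1, i.e. a
     value whose high word is zero, not one with all 128 bits set; hence
     the zero high word passed to immed_double_const above.  */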
  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && (! HARD_REGISTER_P (x)
			      || HARD_REGNO_MODE_OK (REGNO (x), mode))
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return gen_int_mode (val, mode);
	}

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
/* This macro is used to determine what the largest unit size that
   move_by_pieces can use is.  */

/* MOVE_MAX_PIECES is the number of bytes at a time which we can
   move efficiently, as opposed to MOVE_MAX which is the maximum
   number of bytes we can move with a single instruction.  */

#ifndef MOVE_MAX_PIECES
#define MOVE_MAX_PIECES   MOVE_MAX
#endif

/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
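
/* For instance, with MOVE_MAX_PIECES of 8 and a 64-bit HOST_WIDE_INT,
   STORE_MAX_PIECES is MIN (8, 16), i.e. 8 bytes per store.  */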
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).  The caller must pass FROM
   and TO through protect_from_queue before calling.

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.  */

void
move_by_pieces (to, from, len, align)
     rtx to, from;
     unsigned HOST_WIDE_INT len;
     unsigned int align;
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;
  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();
}
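
/* For example, with MOVE_MAX_PIECES of 4, copying len == 7 at 4-byte
   alignment is decomposed into one SImode move, one HImode move and
   one QImode move, matching the count move_by_pieces_ninsns below
   would report (7/4 + 3/2 + 1/1 = 3 insns).  */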
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (l, align)
     unsigned HOST_WIDE_INT l;
     unsigned int align;
{
  unsigned HOST_WIDE_INT n_insns = 0;
  unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PARAMS ((rtx, ...));
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->to)
	{
	  if (data->autinc_to)
	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
					     data->offset);
	  else
	    to1 = adjust_address (data->to, mode, data->offset);
	}

      if (data->autinc_from)
	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
					   data->offset);
      else
	from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
	emit_insn ((*genfun) (to1, from1));
      else
	{
#ifdef PUSH_ROUNDING
	  emit_single_push_insn (mode, from1, NULL);
#else
	  abort ();
#endif
	}

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (x, y, size, method)
     rtx x, y, size;
     enum block_op_methods method;
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
	 to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      abort ();
    }

  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  if (GET_MODE (x) != BLKmode)
    abort ();
  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (GET_CODE (size) == CONST_INT)
    {
      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, size);
      set_mem_size (y, size);
    }

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align);
  else if (emit_block_move_via_movstr (x, y, size, align))
    ;
  else if (may_use_call)
    retval = emit_block_move_via_libcall (x, y, size);
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}
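
#if 0
/* Illustrative sketch, not part of the original file: a typical call
   copying SIZE bytes between two hypothetical BLKmode MEMs.  RET is
   the value memcpy returned if a libcall was emitted, 0 otherwise.  */
rtx ret = emit_block_move (dst_mem, src_mem, size, BLOCK_OP_NORMAL);
#endif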
/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm ()
{
  /* Check to see whether memcpy takes all register arguments.  */
  static enum {
    takes_regs_uninit, takes_regs_no, takes_regs_yes
  } takes_regs = takes_regs_uninit;

  switch (takes_regs)
    {
    case takes_regs_uninit:
      {
	CUMULATIVE_ARGS args_so_far;
	tree fn, arg;

	fn = emit_block_move_libcall_fn (false);
	INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0);

	arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
	for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
	  {
	    enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
	    rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
	    if (!tmp || !REG_P (tmp))
	      goto fail_takes_regs;
#ifdef FUNCTION_ARG_PARTIAL_NREGS
	    if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
					    NULL_TREE, 1))
	      goto fail_takes_regs;
#endif
	    FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
	  }
      }
      takes_regs = takes_regs_yes;
      /* FALLTHRU */

    case takes_regs_yes:
      return true;

    fail_takes_regs:
      takes_regs = takes_regs_no;
      /* FALLTHRU */
    case takes_regs_no:
      return false;
    }
}
/* A subroutine of emit_block_move.  Expand a movstr pattern;
   return true if successful.  */

static bool
emit_block_move_via_movstr (x, y, size, align)
     rtx x, y, size;
     unsigned int align;
{
  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  enum machine_mode mode;

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = movstr_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
	     here because if SIZE is less than the mode mask, as it is
	     returned by the macro, it will definitely be less than the
	     actual mode mask.  */
	  && ((GET_CODE (size) == CONST_INT
	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
		   <= (GET_MODE_MASK (mode) >> 1)))
	      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
	      || (*pred) (x, BLKmode))
	  && ((pred = insn_data[(int) code].operand[1].predicate) == 0
	      || (*pred) (y, BLKmode))
	  && ((pred = insn_data[(int) code].operand[3].predicate) == 0
	      || (*pred) (opalign, VOIDmode)))
	{
	  rtx op2;
	  rtx last = get_last_insn ();
	  rtx pat;

	  op2 = convert_to_mode (mode, size, 1);
	  pred = insn_data[(int) code].operand[2].predicate;
	  if (pred != 0 && ! (*pred) (op2, mode))
	    op2 = copy_to_mode_reg (mode, op2);

	  /* ??? When called via emit_block_move_for_call, it'd be
	     nice if there were some way to inform the backend, so
	     that it doesn't fail the expansion because it thinks
	     emitting the libcall would be more efficient.  */

	  pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	  if (pat)
	    {
	      emit_insn (pat);
	      volatile_ok = 0;
	      return true;
	    }
	  else
	    delete_insns_since (last);
	}
    }

  volatile_ok = 0;
  return false;
}
/* A subroutine of emit_block_move.  Expand a call to memcpy or bcopy.
   Return the return value from memcpy, 0 otherwise.  */

static rtx
emit_block_move_via_libcall (dst, src, size)
     rtx dst, src, size;
{
  rtx dst_addr, src_addr;
  tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* DST, SRC, or SIZE may have been passed through protect_from_queue.

     It is unsafe to save the value generated by protect_from_queue and reuse
     it later.  Consider what happens if emit_queue is called before the
     return value from protect_from_queue is used.

     Expansion of the CALL_EXPR below will call emit_queue before we are
     finished emitting RTL for argument setup.  So if we are not careful we
     could get the wrong value for an argument.

     To avoid this problem we go ahead and emit code to copy the addresses of
     DST and SRC and SIZE into new pseudos.  We can then place those new
     pseudos into an RTL_EXPR and use them later, even after a call to
     emit_queue.

     Note this is not strictly needed for library calls since they do not call
     emit_queue before loading their arguments.  However, we may need to have
     library calls call emit_queue in the future since failing to do so could
     cause problems for targets which define SMALL_REGISTER_CLASSES and pass
     arguments in registers.  */

  dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));

#ifdef POINTERS_EXTEND_UNSIGNED
  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);
#endif

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  if (TARGET_MEM_FUNCTIONS)
    size_mode = TYPE_MODE (sizetype);
  else
    size_mode = TYPE_MODE (unsigned_type_node);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.

     For convenience, we generate the call to bcopy this way as well.  */

  if (TARGET_MEM_FUNCTIONS)
    size_tree = make_tree (sizetype, size);
  else
    size_tree = make_tree (unsigned_type_node, size);

  fn = emit_block_move_libcall_fn (true);
  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
  if (TARGET_MEM_FUNCTIONS)
    {
      arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
      arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
    }
  else
    {
      arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
      arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
    }

  /* Now we have to build up the CALL_EXPR itself.  */
  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
  call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
		     call_expr, arg_list, NULL_TREE);
  TREE_SIDE_EFFECTS (call_expr) = 1;

  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);

  /* If we are initializing a readonly value, show the above call clobbered
     it.  Otherwise, a load from it may erroneously be hoisted from a loop, or
     the delay slot scheduler might overlook conflicts and take nasty
     decisions.  */
  if (RTX_UNCHANGING_P (dst))
    add_function_usage_to
      (last_call_insn (), gen_rtx_EXPR_LIST (VOIDmode,
					     gen_rtx_CLOBBER (VOIDmode, dst),
					     NULL_RTX));

  return TARGET_MEM_FUNCTIONS ? retval : NULL_RTX;
}
/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_move_fn;

void
init_block_move_fn (asmspec)
     const char *asmspec;
{
  if (!block_move_fn)
    {
      tree fn, args;

      if (TARGET_MEM_FUNCTIONS)
	{
	  fn = get_identifier ("memcpy");
	  args = build_function_type_list (ptr_type_node, ptr_type_node,
					   const_ptr_type_node, sizetype,
					   NULL_TREE);
	}
      else
	{
	  fn = get_identifier ("bcopy");
	  args = build_function_type_list (void_type_node, const_ptr_type_node,
					   ptr_type_node, unsigned_type_node,
					   NULL_TREE);
	}

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;

      block_move_fn = fn;
    }

  if (asmspec)
    {
      SET_DECL_RTL (block_move_fn, NULL_RTX);
      SET_DECL_ASSEMBLER_NAME (block_move_fn, get_identifier (asmspec));
    }
}

static tree
emit_block_move_libcall_fn (for_call)
     int for_call;
{
  static bool emitted_extern;

  if (!block_move_fn)
    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_move_fn, NULL);
      assemble_external (block_move_fn);
    }

  return block_move_fn;
}
/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

static void
emit_block_move_via_loop (x, y, size, align)
     rtx x, y, size;
     unsigned int align ATTRIBUTE_UNUSED;
{
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_note (NULL, NOTE_INSN_LOOP_BEG);

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (Pmode, iter_mode, iter, true);
  x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
  y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
			     true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_note (NULL, NOTE_INSN_LOOP_CONT);
  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
			   true, top_label);

  emit_note (NULL, NOTE_INSN_LOOP_END);
}
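
/* The sequence above amounts to the byte-copy loop

       iter = 0;
       goto cmp;
     top:
       x[iter] = y[iter];
       iter = iter + 1;
     cmp:
       if (iter < size) goto top;

   with the comparison at the bottom so that size == 0 copies nothing.  */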
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (regno, x, nregs)
     int regno;
     rtx x;
     int nregs;
{
  int i;

  if (nregs == 0)
    return;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      rtx last = get_last_insn ();
      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
				    GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
	abort ();

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

rtx
gen_group_rtx (orig)
     rtx orig;
{
  int i, length;
  rtx *tmps;

  if (GET_CODE (orig) != PARALLEL)
    abort ();

  length = XVECLEN (orig, 0);
  tmps = (rtx *) alloca (sizeof (rtx) * length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  if (i)
    tmps[0] = 0;

  for (; i < length; i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
    }

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}
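
/* For example, a two-register group such as

     (parallel [(expr_list (reg:SI 3) (const_int 0))
		(expr_list (reg:SI 4) (const_int 4))])

   is cloned into the same PARALLEL shape with the hard registers
   replaced by fresh SImode pseudos.  */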
2244 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
2245 registers represented by a PARALLEL. SSIZE represents the total size of
2246 block SRC in bytes, or -1 if not known. */
2247 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
2248 the balance will be in what would be the low-order memory addresses, i.e.
2249 left justified for big endian, right justified for little endian. This
2250 happens to be true for the targets currently using this support. If this
2251 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
2255 emit_group_load (dst, orig_src, ssize)
2262 if (GET_CODE (dst) != PARALLEL)
2265 /* Check for a NULL entry, used to indicate that the parameter goes
2266 both on the stack and in registers. */
2267 if (XEXP (XVECEXP (dst, 0, 0), 0))
2272 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
2274 /* Process the pieces. */
2275 for (i = start; i < XVECLEN (dst, 0); i++)
2277 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
2278 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
2279 unsigned int bytelen = GET_MODE_SIZE (mode);
2282 /* Handle trailing fragments that run over the size of the struct. */
2283 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2285 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2286 bytelen = ssize - bytepos;
2291 /* If we won't be loading directly from memory, protect the real source
2292 from strange tricks we might play; but make sure that the source can
2293 be loaded directly into the destination. */
2295 if (GET_CODE (orig_src) != MEM
2296 && (!CONSTANT_P (orig_src)
2297 || (GET_MODE (orig_src) != mode
2298 && GET_MODE (orig_src) != VOIDmode)))
2300 if (GET_MODE (orig_src) == VOIDmode)
2301 src = gen_reg_rtx (mode);
2303 src = gen_reg_rtx (GET_MODE (orig_src));
2305 emit_move_insn (src, orig_src);
2308 /* Optimize the access just a bit. */
2309 if (GET_CODE (src) == MEM
2310 && MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode)
2311 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2312 && bytelen == GET_MODE_SIZE (mode))
2314 tmps[i] = gen_reg_rtx (mode);
2315 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
2317 else if (GET_CODE (src) == CONCAT)
2319 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
2320 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
2322 if ((bytepos == 0 && bytelen == slen0)
2323 || (bytepos != 0 && bytepos + bytelen <= slen))
2325 /* The following assumes that the concatenated objects all
2326 have the same size. In this case, a simple calculation
2327 can be used to determine the object and the bit field
2329 tmps[i] = XEXP (src, bytepos / slen0);
2330 if (! CONSTANT_P (tmps[i])
2331 && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
2332 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
2333 (bytepos % slen0) * BITS_PER_UNIT,
2334 1, NULL_RTX, mode, mode, ssize);
2336 else if (bytepos == 0)
2338 rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
2339 emit_move_insn (mem, src);
2340 tmps[i] = adjust_address (mem, mode, 0);
2345 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
2346 SIMD register, which is currently broken. Until we can get GCC
2347 to emit proper RTL for these cases, dump to memory. */
2348 else if (VECTOR_MODE_P (GET_MODE (dst))
2349 && GET_CODE (src) == REG)
2351 int slen = GET_MODE_SIZE (GET_MODE (src));
2354 mem = assign_stack_temp (GET_MODE (src), slen, 0);
2355 emit_move_insn (mem, src);
2356 tmps[i] = adjust_address (mem, mode, (int) bytepos);
2358 else if (CONSTANT_P (src)
2359 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2362 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2363 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2366 if (BYTES_BIG_ENDIAN && shift)
2367 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2368 tmps[i], 0, OPTAB_WIDEN);
2373 /* Copy the extracted pieces into the proper (probable) hard regs. */
2374 for (i = start; i < XVECLEN (dst, 0); i++)
2375 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
2378 /* Emit code to move a block SRC to block DST, where SRC and DST are
2379 non-consecutive groups of registers, each represented by a PARALLEL. */
2382 emit_group_move (dst, src)
2387 if (GET_CODE (src) != PARALLEL
2388 || GET_CODE (dst) != PARALLEL
2389 || XVECLEN (src, 0) != XVECLEN (dst, 0))
2392 /* Skip first entry if NULL. */
2393 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
2394 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
2395 XEXP (XVECEXP (src, 0, i), 0));
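
/* Editorial sketch, not part of the original file: the usual pattern for
   filling a group of hard registers from memory without exposing the hard
   registers too early.  GROUP is a PARALLEL as described above and MEM a
   BLKmode memory; the byte size 8 is illustrative.  */

static void
example_fill_group (group, mem)
     rtx group, mem;
{
  rtx tmp = gen_group_rtx (group);	/* Same shape, fresh pseudos.  */
  emit_group_load (tmp, mem, 8);	/* 8 == total size of MEM in bytes.  */
  emit_group_move (group, tmp);		/* Now copy into the hard regs.  */
}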
2398 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2399 registers represented by a PARALLEL. SSIZE represents the total size of
2400 block DST, or -1 if not known. */
2403 emit_group_store (orig_dst, src, ssize)
2410 if (GET_CODE (src) != PARALLEL)
2413 /* Check for a NULL entry, used to indicate that the parameter goes
2414 both on the stack and in registers. */
2415 if (XEXP (XVECEXP (src, 0, 0), 0))
2420 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2422 /* Copy the (probable) hard regs into pseudos. */
2423 for (i = start; i < XVECLEN (src, 0); i++)
2425 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2426 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2427 emit_move_insn (tmps[i], reg);
2431 /* If we won't be storing directly into memory, protect the real destination
2432 from strange tricks we might play. */
2434 if (GET_CODE (dst) == PARALLEL)
2438 /* We can get a PARALLEL dst if there is a conditional expression in
2439 a return statement. In that case, the dst and src are the same,
2440 so no action is necessary. */
2441 if (rtx_equal_p (dst, src))
2444 /* It is unclear if we can ever reach here, but we may as well handle
2445 it. Allocate a temporary, and split this into a store/load to/from the temporary. */
2448 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2449 emit_group_store (temp, src, ssize);
2450 emit_group_load (dst, temp, ssize);
2453 else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
2455 dst = gen_reg_rtx (GET_MODE (orig_dst));
2456 /* Make life a bit easier for combine. */
2457 emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
2460 /* Process the pieces. */
2461 for (i = start; i < XVECLEN (src, 0); i++)
2463 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2464 enum machine_mode mode = GET_MODE (tmps[i]);
2465 unsigned int bytelen = GET_MODE_SIZE (mode);
2468 /* Handle trailing fragments that run over the size of the struct. */
2469 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2471 if (BYTES_BIG_ENDIAN)
2473 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2474 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2475 tmps[i], 0, OPTAB_WIDEN);
2477 bytelen = ssize - bytepos;
2480 if (GET_CODE (dst) == CONCAT)
2482 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2483 dest = XEXP (dst, 0);
2484 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2486 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2487 dest = XEXP (dst, 1);
2489 else if (bytepos == 0 && XVECLEN (src, 0))
2491 dest = assign_stack_temp (GET_MODE (dest),
2492 GET_MODE_SIZE (GET_MODE (dest)), 0);
2493 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2502 /* Optimize the access just a bit. */
2503 if (GET_CODE (dest) == MEM
2504 && MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode)
2505 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2506 && bytelen == GET_MODE_SIZE (mode))
2507 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2509 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2510 mode, tmps[i], ssize);
2515 /* Copy from the pseudo into the (probable) hard reg. */
2516 if (orig_dst != dst)
2517 emit_move_insn (orig_dst, dst);
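
/* Editorial sketch, not part of the original file: the inverse direction,
   spilling a register group to a fresh stack temporary.  SIZE is the total
   size in bytes of the object the group represents.  */

static rtx
example_spill_group (group, size)
     rtx group;
     int size;
{
  rtx mem = assign_stack_temp (BLKmode, size, 0);
  emit_group_store (mem, group, size);
  return mem;
}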
2520 /* Generate code to copy a BLKmode object of TYPE out of a
2521 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2522 is null, a stack temporary is created. TGTBLK is returned.
2524 The primary purpose of this routine is to handle functions
2525 that return BLKmode structures in registers. Some machines
2526 (the PA for example) want to return all small structures
2527 in registers regardless of the structure's alignment. */
2530 copy_blkmode_from_reg (tgtblk, srcreg, type)
2535 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2536 rtx src = NULL, dst = NULL;
2537 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2538 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2542 tgtblk = assign_temp (build_qualified_type (type,
2544 | TYPE_QUAL_CONST)),
2546 preserve_temp_slots (tgtblk);
2549 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2550 into a new pseudo which is a full word. */
2552 if (GET_MODE (srcreg) != BLKmode
2553 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2554 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2556 /* Structures whose size is not a multiple of a word are aligned
2557 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2558 machine, this means we must skip the empty high order bytes when
2559 calculating the bit offset. */
2560 if (BYTES_BIG_ENDIAN
2561 && bytes % UNITS_PER_WORD)
2562 big_endian_correction
2563 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2565 /* Copy the structure BITSIZE bits at a time.
2567 We could probably emit more efficient code for machines which do not use
2568 strict alignment, but it doesn't seem worth the effort at the current time. */
2570 for (bitpos = 0, xbitpos = big_endian_correction;
2571 bitpos < bytes * BITS_PER_UNIT;
2572 bitpos += bitsize, xbitpos += bitsize)
2574 /* We need a new source operand each time xbitpos is on a
2575 word boundary and when xbitpos == big_endian_correction
2576 (the first time through). */
2577 if (xbitpos % BITS_PER_WORD == 0
2578 || xbitpos == big_endian_correction)
2579 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2582 /* We need a new destination operand each time bitpos is on a word boundary. */
2584 if (bitpos % BITS_PER_WORD == 0)
2585 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2587 /* Use xbitpos for the source extraction (right justified) and
2588 bitpos for the destination store (left justified). */
2589 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2590 extract_bit_field (src, bitsize,
2591 xbitpos % BITS_PER_WORD, 1,
2592 NULL_RTX, word_mode, word_mode,
2600 /* Add a USE expression for REG to the (possibly empty) list pointed
2601 to by CALL_FUSAGE. REG must denote a hard register. */
2604 use_reg (call_fusage, reg)
2605 rtx *call_fusage, reg;
2607 if (GET_CODE (reg) != REG
2608 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2612 = gen_rtx_EXPR_LIST (VOIDmode,
2613 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2616 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2617 starting at REGNO. All of these registers must be hard registers. */
2620 use_regs (call_fusage, regno, nregs)
2627 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2630 for (i = 0; i < nregs; i++)
2631 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2634 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2635 PARALLEL REGS. This is for calls that pass values in multiple
2636 non-contiguous locations. The Irix 6 ABI has examples of this. */
2639 use_group_regs (call_fusage, regs)
2645 for (i = 0; i < XVECLEN (regs, 0); i++)
2647 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2649 /* A NULL entry means the parameter goes both on the stack and in
2650 registers. This can also be a MEM for targets that pass values
2651 partially on the stack and partially in registers. */
2652 if (reg != 0 && GET_CODE (reg) == REG)
2653 use_reg (call_fusage, reg);
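
/* Editorial sketch, not part of the original file: building up a
   CALL_FUSAGE list for a call that passes one value in hard register 3
   and another set of values in a PARALLEL group (all names and register
   numbers hypothetical).  */

static rtx
example_build_fusage (group)
     rtx group;
{
  rtx fusage = NULL_RTX;
  use_reg (&fusage, gen_rtx_REG (SImode, 3));
  use_group_regs (&fusage, group);
  return fusage;
}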
2658 /* Determine whether the LEN bytes generated by CONSTFUN can be
2659 stored to memory using several move instructions. CONSTFUNDATA is
2660 a pointer which will be passed as argument in every CONSTFUN call.
2661 ALIGN is the maximum alignment we can assume. Return nonzero if a
2662 call to store_by_pieces should succeed. */
2665 can_store_by_pieces (len, constfun, constfundata, align)
2666 unsigned HOST_WIDE_INT len;
2667 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2671 unsigned HOST_WIDE_INT max_size, l;
2672 HOST_WIDE_INT offset = 0;
2673 enum machine_mode mode, tmode;
2674 enum insn_code icode;
2678 if (! STORE_BY_PIECES_P (len, align))
2681 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2682 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2683 align = MOVE_MAX * BITS_PER_UNIT;
2685 /* We would first store what we can in the largest integer mode, then go to
2686 successively smaller modes. */
2689 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2694 max_size = STORE_MAX_PIECES + 1;
2695 while (max_size > 1)
2697 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2698 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2699 if (GET_MODE_SIZE (tmode) < max_size)
2702 if (mode == VOIDmode)
2705 icode = mov_optab->handlers[(int) mode].insn_code;
2706 if (icode != CODE_FOR_nothing
2707 && align >= GET_MODE_ALIGNMENT (mode))
2709 unsigned int size = GET_MODE_SIZE (mode);
2716 cst = (*constfun) (constfundata, offset, mode);
2717 if (!LEGITIMATE_CONSTANT_P (cst))
2727 max_size = GET_MODE_SIZE (mode);
2730 /* The code above should have handled everything. */
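
/* Editorial sketch, not part of the original file: a minimal CONSTFUN
   callback describing an all-zero block (mirroring clear_by_pieces_1
   below), paired with the guarded call sequence the comment above
   implies.  All example_* names are hypothetical.  */

static rtx
example_const_zero (data, offset, mode)
     PTR data ATTRIBUTE_UNUSED;
     HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
     enum machine_mode mode;
{
  return CONST0_RTX (mode);
}

static void
example_store_zeros (to, len, align)
     rtx to;
     unsigned HOST_WIDE_INT len;
     unsigned int align;
{
  if (can_store_by_pieces (len, example_const_zero, NULL, align))
    store_by_pieces (to, len, example_const_zero, NULL, align);
}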
2738 /* Generate several move instructions to store LEN bytes generated by
2739 CONSTFUN to block TO (a MEM rtx with BLKmode). CONSTFUNDATA is a
2740 pointer which will be passed as argument in every CONSTFUN call.
2741 ALIGN is the maximum alignment we can assume. */
2744 store_by_pieces (to, len, constfun, constfundata, align)
2746 unsigned HOST_WIDE_INT len;
2747 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2751 struct store_by_pieces data;
2753 if (! STORE_BY_PIECES_P (len, align))
2755 to = protect_from_queue (to, 1);
2756 data.constfun = constfun;
2757 data.constfundata = constfundata;
2760 store_by_pieces_1 (&data, align);
2763 /* Generate several move instructions to clear LEN bytes of block TO (a MEM
2764 rtx with BLKmode). The caller must pass TO through protect_from_queue
2765 before calling. ALIGN is the maximum alignment we can assume. */
2768 clear_by_pieces (to, len, align)
2770 unsigned HOST_WIDE_INT len;
2773 struct store_by_pieces data;
2775 data.constfun = clear_by_pieces_1;
2776 data.constfundata = NULL;
2779 store_by_pieces_1 (&data, align);
2782 /* Callback routine for clear_by_pieces.
2783 Return const0_rtx unconditionally. */
2786 clear_by_pieces_1 (data, offset, mode)
2787 PTR data ATTRIBUTE_UNUSED;
2788 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2789 enum machine_mode mode ATTRIBUTE_UNUSED;
2794 /* Subroutine of clear_by_pieces and store_by_pieces.
2795 Generate several move instructions to store LEN bytes of block TO (a MEM
2796 rtx with BLKmode). The caller must pass TO through protect_from_queue
2797 before calling. ALIGN is the maximum alignment we can assume. */
2800 store_by_pieces_1 (data, align)
2801 struct store_by_pieces *data;
2804 rtx to_addr = XEXP (data->to, 0);
2805 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
2806 enum machine_mode mode = VOIDmode, tmode;
2807 enum insn_code icode;
2810 data->to_addr = to_addr;
2812 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2813 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2815 data->explicit_inc_to = 0;
2817 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2819 data->offset = data->len;
2821 /* If storing requires more than two move insns,
2822 copy addresses to registers (to make displacements shorter)
2823 and use post-increment if available. */
2824 if (!data->autinc_to
2825 && move_by_pieces_ninsns (data->len, align) > 2)
2827 /* Determine the main mode we'll be using. */
2828 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2829 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2830 if (GET_MODE_SIZE (tmode) < max_size)
2833 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2835 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2836 data->autinc_to = 1;
2837 data->explicit_inc_to = -1;
2840 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2841 && ! data->autinc_to)
2843 data->to_addr = copy_addr_to_reg (to_addr);
2844 data->autinc_to = 1;
2845 data->explicit_inc_to = 1;
2848 if (!data->autinc_to && CONSTANT_P (to_addr))
2849 data->to_addr = copy_addr_to_reg (to_addr);
2852 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2853 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2854 align = MOVE_MAX * BITS_PER_UNIT;
2856 /* First store what we can in the largest integer mode, then go to
2857 successively smaller modes. */
2859 while (max_size > 1)
2861 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2862 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2863 if (GET_MODE_SIZE (tmode) < max_size)
2866 if (mode == VOIDmode)
2869 icode = mov_optab->handlers[(int) mode].insn_code;
2870 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2871 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2873 max_size = GET_MODE_SIZE (mode);
2876 /* The code above should have handled everything. */
2881 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2882 with move instructions for mode MODE. GENFUN is the gen_... function
2883 to make a move insn for that mode. DATA has all the other info. */
2886 store_by_pieces_2 (genfun, mode, data)
2887 rtx (*genfun) PARAMS ((rtx, ...));
2888 enum machine_mode mode;
2889 struct store_by_pieces *data;
2891 unsigned int size = GET_MODE_SIZE (mode);
2894 while (data->len >= size)
2897 data->offset -= size;
2899 if (data->autinc_to)
2900 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2903 to1 = adjust_address (data->to, mode, data->offset);
2905 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2906 emit_insn (gen_add2_insn (data->to_addr,
2907 GEN_INT (-(HOST_WIDE_INT) size)));
2909 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2910 emit_insn ((*genfun) (to1, cst));
2912 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2913 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2915 if (! data->reverse)
2916 data->offset += size;
2922 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2923 its length in bytes. */
2926 clear_storage (object, size)
2931 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2932 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2934 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2935 just move a zero. Otherwise, do this a piece at a time. */
2936 if (GET_MODE (object) != BLKmode
2937 && GET_CODE (size) == CONST_INT
2938 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
2939 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2942 object = protect_from_queue (object, 1);
2943 size = protect_from_queue (size, 0);
2945 if (GET_CODE (size) == CONST_INT
2946 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2947 clear_by_pieces (object, INTVAL (size), align);
2948 else if (clear_storage_via_clrstr (object, size, align))
2951 retval = clear_storage_via_libcall (object, size);
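
/* Editorial sketch, not part of the original file: zeroing a BLKmode
   stack temporary through the three-tier strategy above (by pieces, a
   clrstr pattern, then a libcall).  The 32-byte size is illustrative.  */

static void
example_clear_temp ()
{
  rtx mem = assign_stack_temp (BLKmode, 32, 0);
  clear_storage (mem, GEN_INT (32));
}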
2957 /* A subroutine of clear_storage. Expand a clrstr pattern;
2958 return true if successful. */
2961 clear_storage_via_clrstr (object, size, align)
2965 /* Try the most limited insn first, because there's no point
2966 including more than one in the machine description unless
2967 the more limited one has some advantage. */
2969 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2970 enum machine_mode mode;
2972 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2973 mode = GET_MODE_WIDER_MODE (mode))
2975 enum insn_code code = clrstr_optab[(int) mode];
2976 insn_operand_predicate_fn pred;
2978 if (code != CODE_FOR_nothing
2979 /* We don't need MODE to be narrower than
2980 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2981 the mode mask, as it is returned by the macro, it will
2982 definitely be less than the actual mode mask. */
2983 && ((GET_CODE (size) == CONST_INT
2984 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2985 <= (GET_MODE_MASK (mode) >> 1)))
2986 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2987 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2988 || (*pred) (object, BLKmode))
2989 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2990 || (*pred) (opalign, VOIDmode)))
2993 rtx last = get_last_insn ();
2996 op1 = convert_to_mode (mode, size, 1);
2997 pred = insn_data[(int) code].operand[1].predicate;
2998 if (pred != 0 && ! (*pred) (op1, mode))
2999 op1 = copy_to_mode_reg (mode, op1);
3001 pat = GEN_FCN ((int) code) (object, op1, opalign);
3008 delete_insns_since (last);
3015 /* A subroutine of clear_storage. Expand a call to memset or bzero.
3016 Return the return value of memset, 0 otherwise. */
3019 clear_storage_via_libcall (object, size)
3022 tree call_expr, arg_list, fn, object_tree, size_tree;
3023 enum machine_mode size_mode;
3026 /* OBJECT or SIZE may have been passed through protect_from_queue.
3028 It is unsafe to save the value generated by protect_from_queue
3029 and reuse it later. Consider what happens if emit_queue is
3030 called before the return value from protect_from_queue is used.
3032 Expansion of the CALL_EXPR below will call emit_queue before
3033 we are finished emitting RTL for argument setup. So if we are
3034 not careful we could get the wrong value for an argument.
3036 To avoid this problem we go ahead and emit code to copy OBJECT
3037 and SIZE into new pseudos. We can then place those new pseudos
3038 into an RTL_EXPR and use them later, even after a call to
3041 Note this is not strictly needed for library calls since they
3042 do not call emit_queue before loading their arguments. However,
3043 we may need to have library calls call emit_queue in the future
3044 since failing to do so could cause problems for targets which
3045 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
3047 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
3049 if (TARGET_MEM_FUNCTIONS)
3050 size_mode = TYPE_MODE (sizetype);
3052 size_mode = TYPE_MODE (unsigned_type_node);
3053 size = convert_to_mode (size_mode, size, 1);
3054 size = copy_to_mode_reg (size_mode, size);
3056 /* It is incorrect to use the libcall calling conventions to call
3057 memset in this context. This could be a user call to memset and
3058 the user may wish to examine the return value from memset. For
3059 targets where libcalls and normal calls have different conventions
3060 for returning pointers, we could end up generating incorrect code.
3062 For convenience, we generate the call to bzero this way as well. */
3064 object_tree = make_tree (ptr_type_node, object);
3065 if (TARGET_MEM_FUNCTIONS)
3066 size_tree = make_tree (sizetype, size);
3068 size_tree = make_tree (unsigned_type_node, size);
3070 fn = clear_storage_libcall_fn (true);
3071 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
3072 if (TARGET_MEM_FUNCTIONS)
3073 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
3074 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
3076 /* Now we have to build up the CALL_EXPR itself. */
3077 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
3078 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
3079 call_expr, arg_list, NULL_TREE);
3080 TREE_SIDE_EFFECTS (call_expr) = 1;
3082 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
3084 /* If we are initializing a readonly value, show the above call
3085 clobbered it. Otherwise, a load from it may erroneously be
3086 hoisted from a loop. */
3087 if (RTX_UNCHANGING_P (object))
3088 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
3090 return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
3093 /* A subroutine of clear_storage_via_libcall. Create the tree node
3094 for the function we use for block clears. The first time FOR_CALL
3095 is true, we call assemble_external. */
3097 static GTY(()) tree block_clear_fn;
3100 init_block_clear_fn (asmspec)
3101 const char *asmspec;
3103 if (!block_clear_fn)
3107 if (TARGET_MEM_FUNCTIONS)
3109 fn = get_identifier ("memset");
3110 args = build_function_type_list (ptr_type_node, ptr_type_node,
3111 integer_type_node, sizetype,
3116 fn = get_identifier ("bzero");
3117 args = build_function_type_list (void_type_node, ptr_type_node,
3118 unsigned_type_node, NULL_TREE);
3121 fn = build_decl (FUNCTION_DECL, fn, args);
3122 DECL_EXTERNAL (fn) = 1;
3123 TREE_PUBLIC (fn) = 1;
3124 DECL_ARTIFICIAL (fn) = 1;
3125 TREE_NOTHROW (fn) = 1;
3127 block_clear_fn = fn;
3132 SET_DECL_RTL (block_clear_fn, NULL_RTX);
3133 SET_DECL_ASSEMBLER_NAME (block_clear_fn, get_identifier (asmspec));
3138 clear_storage_libcall_fn (for_call)
3141 static bool emitted_extern;
3143 if (!block_clear_fn)
3144 init_block_clear_fn (NULL);
3146 if (for_call && !emitted_extern)
3148 emitted_extern = true;
3149 make_decl_rtl (block_clear_fn, NULL);
3150 assemble_external (block_clear_fn);
3153 return block_clear_fn;
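
/* Editorial sketch, not part of the original file: a target could give
   the block-clear routine a different assembler name via
   init_block_clear_fn; "__my_bzero" is a hypothetical symbol.  */

static void
example_rename_clear_fn ()
{
  init_block_clear_fn ("__my_bzero");
}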
3156 /* Generate code to copy Y into X.
3157 Both Y and X must have the same mode, except that
3158 Y can be a constant with VOIDmode.
3159 This mode cannot be BLKmode; use emit_block_move for that.
3161 Return the last instruction emitted. */
3164 emit_move_insn (x, y)
3167 enum machine_mode mode = GET_MODE (x);
3168 rtx y_cst = NULL_RTX;
3171 x = protect_from_queue (x, 1);
3172 y = protect_from_queue (y, 0);
3174 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
3177 /* Never force constant_p_rtx to memory. */
3178 if (GET_CODE (y) == CONSTANT_P_RTX)
3180 else if (CONSTANT_P (y))
3183 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3184 && (last_insn = compress_float_constant (x, y)))
3187 if (!LEGITIMATE_CONSTANT_P (y))
3190 y = force_const_mem (mode, y);
3192 /* If the target's cannot_force_const_mem prevented the spill,
3193 assume that the target's move expanders will also take care
3194 of the non-legitimate constant. */
3200 /* If X or Y are memory references, verify that their addresses are valid for the machine. */
3202 if (GET_CODE (x) == MEM
3203 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3204 && ! push_operand (x, GET_MODE (x)))
3206 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
3207 x = validize_mem (x);
3209 if (GET_CODE (y) == MEM
3210 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3212 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
3213 y = validize_mem (y);
3215 if (mode == BLKmode)
3218 last_insn = emit_move_insn_1 (x, y);
3220 if (y_cst && GET_CODE (x) == REG)
3221 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
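
/* Editorial sketch, not part of the original file: typical use of
   emit_move_insn, loading an integer constant into a fresh pseudo; the
   REG_EQUAL note mentioned above is attached automatically.  */

static rtx
example_load_constant ()
{
  rtx reg = gen_reg_rtx (SImode);
  emit_move_insn (reg, GEN_INT (42));
  return reg;
}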
3226 /* Low level part of emit_move_insn.
3227 Called just like emit_move_insn, but assumes X and Y
3228 are basically valid. */
3231 emit_move_insn_1 (x, y)
3234 enum machine_mode mode = GET_MODE (x);
3235 enum machine_mode submode;
3236 enum mode_class class = GET_MODE_CLASS (mode);
3238 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
3241 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
3243 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
3245 /* Expand complex moves by moving real part and imag part, if possible. */
3246 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
3247 && BLKmode != (submode = GET_MODE_INNER (mode))
3248 && (mov_optab->handlers[(int) submode].insn_code
3249 != CODE_FOR_nothing))
3251 /* Don't split destination if it is a stack push. */
3252 int stack = push_operand (x, GET_MODE (x));
3254 #ifdef PUSH_ROUNDING
3255 /* In case we output to the stack, but the size is smaller than what the
3256 machine can push exactly, we need to use move instructions. */
3258 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
3259 != GET_MODE_SIZE (submode)))
3262 HOST_WIDE_INT offset1, offset2;
3264 /* Do not use anti_adjust_stack, since we don't want to update
3265 stack_pointer_delta. */
3266 temp = expand_binop (Pmode,
3267 #ifdef STACK_GROWS_DOWNWARD
3275 (GET_MODE_SIZE (GET_MODE (x)))),
3276 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3278 if (temp != stack_pointer_rtx)
3279 emit_move_insn (stack_pointer_rtx, temp);
3281 #ifdef STACK_GROWS_DOWNWARD
3283 offset2 = GET_MODE_SIZE (submode);
3285 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
3286 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
3287 + GET_MODE_SIZE (submode));
3290 emit_move_insn (change_address (x, submode,
3291 gen_rtx_PLUS (Pmode,
3293 GEN_INT (offset1))),
3294 gen_realpart (submode, y));
3295 emit_move_insn (change_address (x, submode,
3296 gen_rtx_PLUS (Pmode,
3298 GEN_INT (offset2))),
3299 gen_imagpart (submode, y));
3303 /* If this is a stack push, push the highpart first, so it
3304 will be in the argument order.
3306 In that case, change_address is used only to convert
3307 the mode, not to change the address. */
3310 /* Note that the real part always precedes the imag part in memory
3311 regardless of the machine's endianness. */
3312 #ifdef STACK_GROWS_DOWNWARD
3313 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3314 (gen_rtx_MEM (submode, XEXP (x, 0)),
3315 gen_imagpart (submode, y)));
3316 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3317 (gen_rtx_MEM (submode, XEXP (x, 0)),
3318 gen_realpart (submode, y)));
3320 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3321 (gen_rtx_MEM (submode, XEXP (x, 0)),
3322 gen_realpart (submode, y)));
3323 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3324 (gen_rtx_MEM (submode, XEXP (x, 0)),
3325 gen_imagpart (submode, y)));
3330 rtx realpart_x, realpart_y;
3331 rtx imagpart_x, imagpart_y;
3333 /* If this is a complex value with each part being smaller than a
3334 word, the usual calling sequence will likely pack the pieces into
3335 a single register. Unfortunately, SUBREG of hard registers only
3336 deals in terms of words, so we have a problem converting input
3337 arguments to the CONCAT of two registers that is used elsewhere
3338 for complex values. If this is before reload, we can copy it into
3339 memory and reload. FIXME, we should see about using extract and
3340 insert on integer registers, but complex short and complex char
3341 variables should be rarely used. */
3342 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
3343 && (reload_in_progress | reload_completed) == 0)
3346 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
3348 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
3350 if (packed_dest_p || packed_src_p)
3352 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
3353 ? MODE_FLOAT : MODE_INT);
3355 enum machine_mode reg_mode
3356 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
3358 if (reg_mode != BLKmode)
3360 rtx mem = assign_stack_temp (reg_mode,
3361 GET_MODE_SIZE (mode), 0);
3362 rtx cmem = adjust_address (mem, mode, 0);
3365 = N_("function using short complex types cannot be inline");
3369 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
3371 emit_move_insn_1 (cmem, y);
3372 return emit_move_insn_1 (sreg, mem);
3376 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
3378 emit_move_insn_1 (mem, sreg);
3379 return emit_move_insn_1 (x, cmem);
3385 realpart_x = gen_realpart (submode, x);
3386 realpart_y = gen_realpart (submode, y);
3387 imagpart_x = gen_imagpart (submode, x);
3388 imagpart_y = gen_imagpart (submode, y);
3390 /* Show the output dies here. This is necessary for SUBREGs
3391 of pseudos since we cannot track their lifetimes correctly;
3392 hard regs shouldn't appear here except as return values.
3393 We never want to emit such a clobber after reload. */
3395 && ! (reload_in_progress || reload_completed)
3396 && (GET_CODE (realpart_x) == SUBREG
3397 || GET_CODE (imagpart_x) == SUBREG))
3398 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3400 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3401 (realpart_x, realpart_y));
3402 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3403 (imagpart_x, imagpart_y));
3406 return get_last_insn ();
3409 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
3410 find a mode to do it in. If we have a movcc, use it. Otherwise,
3411 find the MODE_INT mode of the same width. */
3412 else if (GET_MODE_CLASS (mode) == MODE_CC
3413 && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
3415 enum insn_code insn_code;
3416 enum machine_mode tmode = VOIDmode;
3420 && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
3423 for (tmode = QImode; tmode != VOIDmode;
3424 tmode = GET_MODE_WIDER_MODE (tmode))
3425 if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
3428 if (tmode == VOIDmode)
3431 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
3432 may call change_address which is not appropriate if we were
3433 called when a reload was in progress. We don't have to worry
3434 about changing the address since the size in bytes is supposed to
3435 be the same. Copy the MEM to change the mode and move any
3436 substitutions from the old MEM to the new one. */
3438 if (reload_in_progress)
3440 x = gen_lowpart_common (tmode, x1);
3441 if (x == 0 && GET_CODE (x1) == MEM)
3443 x = adjust_address_nv (x1, tmode, 0);
3444 copy_replacements (x1, x);
3447 y = gen_lowpart_common (tmode, y1);
3448 if (y == 0 && GET_CODE (y1) == MEM)
3450 y = adjust_address_nv (y1, tmode, 0);
3451 copy_replacements (y1, y);
3456 x = gen_lowpart (tmode, x);
3457 y = gen_lowpart (tmode, y);
3460 insn_code = mov_optab->handlers[(int) tmode].insn_code;
3461 return emit_insn (GEN_FCN (insn_code) (x, y));
3464 /* This will handle any multi-word or full-word mode that lacks a move_insn
3465 pattern. However, you will get better code if you define such patterns,
3466 even if they must turn into multiple assembler instructions. */
3467 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
3474 #ifdef PUSH_ROUNDING
3476 /* If X is a push on the stack, do the push now and replace
3477 X with a reference to the stack pointer. */
3478 if (push_operand (x, GET_MODE (x)))
3483 /* Do not use anti_adjust_stack, since we don't want to update
3484 stack_pointer_delta. */
3485 temp = expand_binop (Pmode,
3486 #ifdef STACK_GROWS_DOWNWARD
3494 (GET_MODE_SIZE (GET_MODE (x)))),
3495 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3497 if (temp != stack_pointer_rtx)
3498 emit_move_insn (stack_pointer_rtx, temp);
3500 code = GET_CODE (XEXP (x, 0));
3502 /* Just hope that small offsets off SP are OK. */
3503 if (code == POST_INC)
3504 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3505 GEN_INT (-((HOST_WIDE_INT)
3506 GET_MODE_SIZE (GET_MODE (x)))));
3507 else if (code == POST_DEC)
3508 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3509 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3511 temp = stack_pointer_rtx;
3513 x = change_address (x, VOIDmode, temp);
3517 /* If we are in reload, see if either operand is a MEM whose address
3518 is scheduled for replacement. */
3519 if (reload_in_progress && GET_CODE (x) == MEM
3520 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3521 x = replace_equiv_address_nv (x, inner);
3522 if (reload_in_progress && GET_CODE (y) == MEM
3523 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3524 y = replace_equiv_address_nv (y, inner);
3530 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3533 rtx xpart = operand_subword (x, i, 1, mode);
3534 rtx ypart = operand_subword (y, i, 1, mode);
3536 /* If we can't get a part of Y, put Y into memory if it is a
3537 constant. Otherwise, force it into a register. If we still
3538 can't get a part of Y, abort. */
3539 if (ypart == 0 && CONSTANT_P (y))
3541 y = force_const_mem (mode, y);
3542 ypart = operand_subword (y, i, 1, mode);
3544 else if (ypart == 0)
3545 ypart = operand_subword_force (y, i, mode);
3547 if (xpart == 0 || ypart == 0)
3550 need_clobber |= (GET_CODE (xpart) == SUBREG);
3552 last_insn = emit_move_insn (xpart, ypart);
3558 /* Show the output dies here. This is necessary for SUBREGs
3559 of pseudos since we cannot track their lifetimes correctly;
3560 hard regs shouldn't appear here except as return values.
3561 We never want to emit such a clobber after reload. */
3563 && ! (reload_in_progress || reload_completed)
3564 && need_clobber != 0)
3565 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3575 /* If Y is representable exactly in a narrower mode, and the target can
3576 perform the extension directly from constant or memory, then emit the
3577 move as an extension. */
3580 compress_float_constant (x, y)
3583 enum machine_mode dstmode = GET_MODE (x);
3584 enum machine_mode orig_srcmode = GET_MODE (y);
3585 enum machine_mode srcmode;
3588 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3590 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3591 srcmode != orig_srcmode;
3592 srcmode = GET_MODE_WIDER_MODE (srcmode))
3595 rtx trunc_y, last_insn;
3597 /* Skip if the target can't extend this way. */
3598 ic = can_extend_p (dstmode, srcmode, 0);
3599 if (ic == CODE_FOR_nothing)
3602 /* Skip if the narrowed value isn't exact. */
3603 if (! exact_real_truncate (srcmode, &r))
3606 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3608 if (LEGITIMATE_CONSTANT_P (trunc_y))
3610 /* Skip if the target needs extra instructions to perform the extension. */
3612 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3615 else if (float_extend_from_mem[dstmode][srcmode])
3616 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3620 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3621 last_insn = get_last_insn ();
3623 if (GET_CODE (x) == REG)
3624 REG_NOTES (last_insn)
3625 = gen_rtx_EXPR_LIST (REG_EQUAL, y, REG_NOTES (last_insn));
3633 /* Pushing data onto the stack. */
3635 /* Push a block of length SIZE (perhaps variable)
3636 and return an rtx to address the beginning of the block.
3637 Note that it is not possible for the value returned to be a QUEUED.
3638 The value may be virtual_outgoing_args_rtx.
3640 EXTRA is the number of bytes of padding to push in addition to SIZE.
3641 BELOW nonzero means this padding comes at low addresses;
3642 otherwise, the padding comes at high addresses. */
3645 push_block (size, extra, below)
3651 size = convert_modes (Pmode, ptr_mode, size, 1);
3652 if (CONSTANT_P (size))
3653 anti_adjust_stack (plus_constant (size, extra));
3654 else if (GET_CODE (size) == REG && extra == 0)
3655 anti_adjust_stack (size);
3658 temp = copy_to_mode_reg (Pmode, size);
3660 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3661 temp, 0, OPTAB_LIB_WIDEN);
3662 anti_adjust_stack (temp);
3665 #ifndef STACK_GROWS_DOWNWARD
3671 temp = virtual_outgoing_args_rtx;
3672 if (extra != 0 && below)
3673 temp = plus_constant (temp, extra);
3677 if (GET_CODE (size) == CONST_INT)
3678 temp = plus_constant (virtual_outgoing_args_rtx,
3679 -INTVAL (size) - (below ? 0 : extra));
3680 else if (extra != 0 && !below)
3681 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3682 negate_rtx (Pmode, plus_constant (size, extra)));
3684 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3685 negate_rtx (Pmode, size));
3688 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
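
/* Editorial sketch, not part of the original file: reserving a 16-byte
   block on the stack and obtaining its address; EXTRA and BELOW are zero
   here, and the size is illustrative.  */

static rtx
example_push_block ()
{
  return push_block (GEN_INT (16), 0, 0);
}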
3691 #ifdef PUSH_ROUNDING
3693 /* Emit single push insn. */
3696 emit_single_push_insn (mode, x, type)
3698 enum machine_mode mode;
3702 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3704 enum insn_code icode;
3705 insn_operand_predicate_fn pred;
3707 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3708 /* If there is a push pattern, use it. Otherwise try the old way of
3709 throwing a MEM representing the push operation to the move expander. */
3710 icode = push_optab->handlers[(int) mode].insn_code;
3711 if (icode != CODE_FOR_nothing)
3713 if (((pred = insn_data[(int) icode].operand[0].predicate)
3714 && !((*pred) (x, mode))))
3715 x = force_reg (mode, x);
3716 emit_insn (GEN_FCN (icode) (x));
3719 if (GET_MODE_SIZE (mode) == rounded_size)
3720 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3723 #ifdef STACK_GROWS_DOWNWARD
3724 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3725 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3727 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3728 GEN_INT (rounded_size));
3730 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3733 dest = gen_rtx_MEM (mode, dest_addr);
3737 set_mem_attributes (dest, type, 1);
3739 if (flag_optimize_sibling_calls)
3740 /* Function incoming arguments may overlap with sibling call
3741 outgoing arguments and we cannot allow reordering of reads
3742 from function arguments with stores to outgoing arguments
3743 of sibling calls. */
3744 set_mem_alias_set (dest, 0);
3746 emit_move_insn (dest, x);
3750 /* Generate code to push X onto the stack, assuming it has mode MODE and type TYPE.
3752 MODE is redundant except when X is a CONST_INT (since they don't carry mode info).
3754 SIZE is an rtx for the size of data to be copied (in bytes),
3755 needed only if X is BLKmode.
3757 ALIGN (in bits) is maximum alignment we can assume.
3759 If PARTIAL and REG are both nonzero, then copy that many of the first
3760 words of X into registers starting with REG, and push the rest of X.
3761 The amount of space pushed is decreased by PARTIAL words,
3762 rounded *down* to a multiple of PARM_BOUNDARY.
3763 REG must be a hard register in this case.
3764 If REG is zero but PARTIAL is not, take all other actions for an
3765 argument partially in registers, but do not actually load any
3768 EXTRA is the amount in bytes of extra space to leave next to this arg.
3769 This is ignored if an argument block has already been allocated.
3771 On a machine that lacks real push insns, ARGS_ADDR is the address of
3772 the bottom of the argument block for this call. We use indexing off there
3773 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3774 argument block has not been preallocated.
3776 ARGS_SO_FAR is the size of args previously pushed for this call.
3778 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3779 for arguments passed in registers. If nonzero, it will be the number
3780 of bytes required. */
3783 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3784 args_addr, args_so_far, reg_parm_stack_space,
3787 enum machine_mode mode;
3796 int reg_parm_stack_space;
3800 enum direction stack_direction
3801 #ifdef STACK_GROWS_DOWNWARD
3807 /* Decide where to pad the argument: `downward' for below,
3808 `upward' for above, or `none' for don't pad it.
3809 Default is below for small data on big-endian machines; else above. */
3810 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3812 /* Invert direction if stack is post-decrement. FIXME: why? */
3814 if (STACK_PUSH_CODE == POST_DEC)
3815 if (where_pad != none)
3816 where_pad = (where_pad == downward ? upward : downward);
3818 xinner = x = protect_from_queue (x, 0);
3820 if (mode == BLKmode)
3822 /* Copy a block into the stack, entirely or partially. */
3825 int used = partial * UNITS_PER_WORD;
3826 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3834 /* USED is now the # of bytes we need not copy to the stack
3835 because registers will take care of them. */
3838 xinner = adjust_address (xinner, BLKmode, used);
3840 /* If the partial register-part of the arg counts in its stack size,
3841 skip the part of stack space corresponding to the registers.
3842 Otherwise, start copying to the beginning of the stack space,
3843 by setting SKIP to 0. */
3844 skip = (reg_parm_stack_space == 0) ? 0 : used;
3846 #ifdef PUSH_ROUNDING
3847 /* Do it with several push insns if that doesn't take lots of insns
3848 and if there is no difficulty with push insns that skip bytes
3849 on the stack for alignment purposes. */
3852 && GET_CODE (size) == CONST_INT
3854 && MEM_ALIGN (xinner) >= align
3855 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3856 /* Here we avoid the case of a structure whose weak alignment
3857 forces many pushes of a small amount of data,
3858 and such small pushes do rounding that causes trouble. */
3859 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3860 || align >= BIGGEST_ALIGNMENT
3861 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3862 == (align / BITS_PER_UNIT)))
3863 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3865 /* Push padding now if padding above and stack grows down,
3866 or if padding below and stack grows up.
3867 But if space is already allocated, this has already been done. */
3868 if (extra && args_addr == 0
3869 && where_pad != none && where_pad != stack_direction)
3870 anti_adjust_stack (GEN_INT (extra));
3872 move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
3875 #endif /* PUSH_ROUNDING */
3879 /* Otherwise make space on the stack and copy the data
3880 to the address of that space. */
3882 /* Deduct words put into registers from the size we must copy. */
3885 if (GET_CODE (size) == CONST_INT)
3886 size = GEN_INT (INTVAL (size) - used);
3888 size = expand_binop (GET_MODE (size), sub_optab, size,
3889 GEN_INT (used), NULL_RTX, 0,
3893 /* Get the address of the stack space.
3894 In this case, we do not deal with EXTRA separately.
3895 A single stack adjust will do. */
3898 temp = push_block (size, extra, where_pad == downward);
3901 else if (GET_CODE (args_so_far) == CONST_INT)
3902 temp = memory_address (BLKmode,
3903 plus_constant (args_addr,
3904 skip + INTVAL (args_so_far)));
3906 temp = memory_address (BLKmode,
3907 plus_constant (gen_rtx_PLUS (Pmode,
3912 if (!ACCUMULATE_OUTGOING_ARGS)
3914 /* If the source is referenced relative to the stack pointer,
3915 copy it to another register to stabilize it. We do not need
3916 to do this if we know that we won't be changing sp. */
3918 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3919 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3920 temp = copy_to_reg (temp);
3923 target = gen_rtx_MEM (BLKmode, temp);
3927 set_mem_attributes (target, type, 1);
3928 /* Function incoming arguments may overlap with sibling call
3929 outgoing arguments and we cannot allow reordering of reads
3930 from function arguments with stores to outgoing arguments
3931 of sibling calls. */
3932 set_mem_alias_set (target, 0);
3935 /* ALIGN may well be better aligned than TYPE, e.g. due to
3936 PARM_BOUNDARY. Assume the caller isn't lying. */
3937 set_mem_align (target, align);
3939 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3942 else if (partial > 0)
3944 /* Scalar partly in registers. */
3946 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3949 /* # words of start of argument
3950 that we must make space for but need not store. */
3951 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3952 int args_offset = INTVAL (args_so_far);
3955 /* Push padding now if padding above and stack grows down,
3956 or if padding below and stack grows up.
3957 But if space is already allocated, this has already been done. */
3958 if (extra && args_addr == 0
3959 && where_pad != none && where_pad != stack_direction)
3960 anti_adjust_stack (GEN_INT (extra));
3962 /* If we make space by pushing it, we might as well push
3963 the real data. Otherwise, we can leave OFFSET nonzero
3964 and leave the space uninitialized. */
3968 /* Now NOT_STACK gets the number of words that we don't need to
3969 allocate on the stack. */
3970 not_stack = partial - offset;
3972 /* If the partial register-part of the arg counts in its stack size,
3973 skip the part of stack space corresponding to the registers.
3974 Otherwise, start copying to the beginning of the stack space,
3975 by setting SKIP to 0. */
3976 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3978 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3979 x = validize_mem (force_const_mem (mode, x));
3981 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3982 SUBREGs of such registers are not allowed. */
3983 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3984 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3985 x = copy_to_reg (x);
3987 /* Loop over all the words allocated on the stack for this arg. */
3988 /* We can do it by words, because any scalar bigger than a word
3989 has a size a multiple of a word. */
3990 #ifndef PUSH_ARGS_REVERSED
3991 for (i = not_stack; i < size; i++)
3993 for (i = size - 1; i >= not_stack; i--)
3995 if (i >= not_stack + offset)
3996 emit_push_insn (operand_subword_force (x, i, mode),
3997 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3999 GEN_INT (args_offset + ((i - not_stack + skip)
4001 reg_parm_stack_space, alignment_pad);
4008 /* Push padding now if padding above and stack grows down,
4009 or if padding below and stack grows up.
4010 But if space is already allocated, this has already been done. */
4011 if (extra && args_addr == 0
4012 && where_pad != none && where_pad != stack_direction)
4013 anti_adjust_stack (GEN_INT (extra));
4015 #ifdef PUSH_ROUNDING
4016 if (args_addr == 0 && PUSH_ARGS)
4017 emit_single_push_insn (mode, x, type);
4021 if (GET_CODE (args_so_far) == CONST_INT)
4023 = memory_address (mode,
4024 plus_constant (args_addr,
4025 INTVAL (args_so_far)));
4027 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
4029 dest = gen_rtx_MEM (mode, addr);
4032 set_mem_attributes (dest, type, 1);
4033 /* Function incoming arguments may overlap with sibling call
4034 outgoing arguments and we cannot allow reordering of reads
4035 from function arguments with stores to outgoing arguments
4036 of sibling calls. */
4037 set_mem_alias_set (dest, 0);
4040 emit_move_insn (dest, x);
4044 /* If part should go in registers, copy that part
4045 into the appropriate registers. Do this now, at the end,
4046 since mem-to-mem copies above may do function calls. */
4047 if (partial > 0 && reg != 0)
4049 /* Handle calls that pass values in multiple non-contiguous locations.
4050 The Irix 6 ABI has examples of this. */
4051 if (GET_CODE (reg) == PARALLEL)
4052 emit_group_load (reg, x, -1); /* ??? size? */
4054 move_block_to_reg (REGNO (reg), x, partial, mode);
4057 if (extra && args_addr == 0 && where_pad == stack_direction)
4058 anti_adjust_stack (GEN_INT (extra));
4060 if (alignment_pad && args_addr == 0)
4061 anti_adjust_stack (alignment_pad);
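
/* Editorial sketch, not part of the original file: pushing a plain SImode
   scalar with no partial-register part and no preallocated argument
   block.  All argument values are illustrative; they follow the parameter
   description above emit_push_insn.  */

static void
example_push_scalar (x)
     rtx x;
{
  emit_push_insn (x, SImode, NULL_TREE, NULL_RTX,
		  GET_MODE_ALIGNMENT (SImode), 0, NULL_RTX, 0,
		  NULL_RTX, const0_rtx, 0, NULL_RTX);
}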
4064 /* Return X if X can be used as a subtarget in a sequence of arithmetic operations. */
4072 /* Only registers can be subtargets. */
4073 || GET_CODE (x) != REG
4074 /* If the register is readonly, it can't be set more than once. */
4075 || RTX_UNCHANGING_P (x)
4076 /* Don't use hard regs to avoid extending their life. */
4077 || REGNO (x) < FIRST_PSEUDO_REGISTER
4078 /* Avoid subtargets inside loops,
4079 since they hide some invariant expressions. */
4080 || preserve_subexpressions_p ())
4084 /* Expand an assignment that stores the value of FROM into TO.
4085 If WANT_VALUE is nonzero, return an rtx for the value of TO.
4086 (This may contain a QUEUED rtx;
4087 if the value is constant, this rtx is a constant.)
4088 Otherwise, the returned value is NULL_RTX.
4090 SUGGEST_REG is no longer actually used.
4091 It used to mean, copy the value through a register
4092 and return that register, if that is possible.
4093 We now use WANT_VALUE to decide whether to do this. */
4096 expand_assignment (to, from, want_value, suggest_reg)
4099 int suggest_reg ATTRIBUTE_UNUSED;
4104 /* Don't crash if the lhs of the assignment was erroneous. */
4106 if (TREE_CODE (to) == ERROR_MARK)
4108 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
4109 return want_value ? result : NULL_RTX;
4112 /* Assignment of a structure component needs special treatment
4113 if the structure component's rtx is not simply a MEM.
4114 Assignment of an array element at a constant index, and assignment of
4115 an array element in an unaligned packed structure field, has the same problem. */
4118 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
4119 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
4120 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4122 enum machine_mode mode1;
4123 HOST_WIDE_INT bitsize, bitpos;
4131 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4132 &unsignedp, &volatilep);
4134 /* If we are going to use store_bit_field and extract_bit_field,
4135 make sure to_rtx will be safe for multiple use. */
4137 if (mode1 == VOIDmode && want_value)
4138 tem = stabilize_reference (tem);
4140 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
4144 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4146 if (GET_CODE (to_rtx) != MEM)
4149 #ifdef POINTERS_EXTEND_UNSIGNED
4150 if (GET_MODE (offset_rtx) != Pmode)
4151 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4153 if (GET_MODE (offset_rtx) != ptr_mode)
4154 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4157 /* A constant address in TO_RTX can have VOIDmode; we must not try
4158 to call force_reg for that case, so avoid it.
4159 if (GET_CODE (to_rtx) == MEM
4160 && GET_MODE (to_rtx) == BLKmode
4161 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4163 && (bitpos % bitsize) == 0
4164 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4165 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4167 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4171 to_rtx = offset_address (to_rtx, offset_rtx,
4172 highest_pow2_factor_for_type (TREE_TYPE (to),
4176 if (GET_CODE (to_rtx) == MEM)
4178 /* If the field is at offset zero, we could have been given the
4179 DECL_RTX of the parent struct. Don't munge it. */
4180 to_rtx = shallow_copy_rtx (to_rtx);
4182 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4185 /* Deal with volatile and readonly fields. The former is only done
4186 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4187 if (volatilep && GET_CODE (to_rtx) == MEM)
4189 if (to_rtx == orig_to_rtx)
4190 to_rtx = copy_rtx (to_rtx);
4191 MEM_VOLATILE_P (to_rtx) = 1;
4194 if (TREE_CODE (to) == COMPONENT_REF
4195 && TREE_READONLY (TREE_OPERAND (to, 1)))
4197 if (to_rtx == orig_to_rtx)
4198 to_rtx = copy_rtx (to_rtx);
4199 RTX_UNCHANGING_P (to_rtx) = 1;
4202 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
4204 if (to_rtx == orig_to_rtx)
4205 to_rtx = copy_rtx (to_rtx);
4206 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4209 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4211 /* Spurious cast for HPUX compiler. */
4212 ? ((enum machine_mode)
4213 TYPE_MODE (TREE_TYPE (to)))
4215 unsignedp, TREE_TYPE (tem), get_alias_set (to));
4217 preserve_temp_slots (result);
4221 /* If the value is meaningful, convert RESULT to the proper mode.
4222 Otherwise, return nothing. */
4223 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
4224 TYPE_MODE (TREE_TYPE (from)),
4226 TREE_UNSIGNED (TREE_TYPE (to)))
4230 /* If the rhs is a function call and its value is not an aggregate,
4231 call the function before we start to compute the lhs.
4232 This is needed for correct code for cases such as
4233 val = setjmp (buf) on machines where reference to val
4234 requires loading up part of an address in a separate insn.
4236 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4237 since it might be a promoted variable where the zero- or sign- extension
4238 needs to be done. Handling this in the normal way is safe because no
4239 computation is done before the call. */
4240 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
4241 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4242 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4243 && GET_CODE (DECL_RTL (to)) == REG))
4248 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
4250 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4252 /* Handle calls that return values in multiple non-contiguous locations.
4253 The Irix 6 ABI has examples of this. */
4254 if (GET_CODE (to_rtx) == PARALLEL)
4255 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)));
4256 else if (GET_MODE (to_rtx) == BLKmode)
4257 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4260 #ifdef POINTERS_EXTEND_UNSIGNED
4261 if (POINTER_TYPE_P (TREE_TYPE (to))
4262 && GET_MODE (to_rtx) != GET_MODE (value))
4263 value = convert_memory_address (GET_MODE (to_rtx), value);
4265 emit_move_insn (to_rtx, value);
4267 preserve_temp_slots (to_rtx);
4270 return want_value ? to_rtx : NULL_RTX;
4273 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4274 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4277 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4279 /* Don't move directly into a return register. */
4280 if (TREE_CODE (to) == RESULT_DECL
4281 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
4286 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
4288 if (GET_CODE (to_rtx) == PARALLEL)
4289 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)));
4291 emit_move_insn (to_rtx, temp);
4293 preserve_temp_slots (to_rtx);
4296 return want_value ? to_rtx : NULL_RTX;
4299 /* In case we are returning the contents of an object which overlaps
4300 the place the value is being stored, use a safe function when copying
4301 a value through a pointer into a structure value return block. */
4302 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4303 && current_function_returns_struct
4304 && !current_function_returns_pcc_struct)
4309 size = expr_size (from);
4310 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
4312 if (TARGET_MEM_FUNCTIONS)
4313 emit_library_call (memmove_libfunc, LCT_NORMAL,
4314 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4315 XEXP (from_rtx, 0), Pmode,
4316 convert_to_mode (TYPE_MODE (sizetype),
4317 size, TREE_UNSIGNED (sizetype)),
4318 TYPE_MODE (sizetype));
4320 emit_library_call (bcopy_libfunc, LCT_NORMAL,
4321 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
4322 XEXP (to_rtx, 0), Pmode,
4323 convert_to_mode (TYPE_MODE (integer_type_node),
4325 TREE_UNSIGNED (integer_type_node)),
4326 TYPE_MODE (integer_type_node));
4328 preserve_temp_slots (to_rtx);
4331 return want_value ? to_rtx : NULL_RTX;
4334 /* Compute FROM and store the value in the rtx we got. */
4337 result = store_expr (from, to_rtx, want_value);
4338 preserve_temp_slots (result);
4341 return want_value ? result : NULL_RTX;
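
/* Editorial sketch, not part of the original file: expanding the
   assignment `v = e' for trees V and E when the value of the assignment
   is not needed (WANT_VALUE zero; SUGGEST_REG is unused, per above).  */

static void
example_expand_assignment (v, e)
     tree v, e;
{
  expand_assignment (v, e, 0, 0);
}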
4344 /* Generate code for computing expression EXP,
4345 and storing the value into TARGET.
4346 TARGET may contain a QUEUED rtx.
4348 If WANT_VALUE & 1 is nonzero, return a copy of the value
4349 not in TARGET, so that we can be sure to use the proper
4350 value in a containing expression even if TARGET has something
4351 else stored in it. If possible, we copy the value through a pseudo
4352 and return that pseudo. Or, if the value is constant, we try to
4353 return the constant. In some cases, we return a pseudo
4354 copied *from* TARGET.
4356 If the mode is BLKmode then we may return TARGET itself.
4357 It turns out that in BLKmode it doesn't cause a problem,
4358 because C has no operators that could combine two different
4359 assignments into the same BLKmode object with different values
4360 with no sequence point. Will other languages need this to be more thorough?
4363 If WANT_VALUE & 1 is 0, we return NULL, to make sure
4364 to catch quickly any cases where the caller uses the value
4365 and fails to set WANT_VALUE.
4367 If WANT_VALUE & 2 is set, this is a store into a call param on the
4368 stack, and block moves may need to be treated specially. */
4371 store_expr (exp, target, want_value)
4377 int dont_return_target = 0;
4378 int dont_store_target = 0;
4380 if (VOID_TYPE_P (TREE_TYPE (exp)))
4382 /* C++ can generate ?: expressions with a throw expression in one
4383 branch and an rvalue in the other. Here, we resolve attempts to
4384 store the throw expression's nonexistent result. */
4387 expand_expr (exp, const0_rtx, VOIDmode, 0);
4390 if (TREE_CODE (exp) == COMPOUND_EXPR)
4392 /* Perform first part of compound expression, then assign from second
4393 part. */
4394 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4395 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4397 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4399 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4401 /* For conditional expression, get safe form of the target. Then
4402 test the condition, doing the appropriate assignment on either
4403 side. This avoids the creation of unnecessary temporaries.
4404 For non-BLKmode, it is more efficient not to do this. */
4406 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4409 target = protect_from_queue (target, 1);
4411 do_pending_stack_adjust ();
4413 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4414 start_cleanup_deferral ();
4415 store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
4416 end_cleanup_deferral ();
4418 emit_jump_insn (gen_jump (lab2));
4421 start_cleanup_deferral ();
4422 store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
4423 end_cleanup_deferral ();
4428 return want_value & 1 ? target : NULL_RTX;
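/* Illustrative note, added in editing: the BLKmode COND_EXPR case
   above corresponds to C source such as

     struct big { int a[16]; } x, y, z;
     z = cond ? x : y;

   which expands as a jump on COND followed by a direct store of X or Y
   into Z, avoiding a block-sized temporary.  */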
4430 else if (queued_subexp_p (target))
4431 /* If target contains a postincrement, let's not risk
4432 using it as the place to generate the rhs. */
4434 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4436 /* Expand EXP into a new pseudo. */
4437 temp = gen_reg_rtx (GET_MODE (target));
4438 temp = expand_expr (exp, temp, GET_MODE (target),
4440 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4443 temp = expand_expr (exp, NULL_RTX, GET_MODE (target),
4445 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4447 /* If target is volatile, ANSI requires accessing the value
4448 *from* the target, if it is accessed. So make that happen.
4449 In no case return the target itself. */
4450 if (! MEM_VOLATILE_P (target) && (want_value & 1) != 0)
4451 dont_return_target = 1;
4453 else if ((want_value & 1) != 0
4454 && GET_CODE (target) == MEM
4455 && ! MEM_VOLATILE_P (target)
4456 && GET_MODE (target) != BLKmode)
4457 /* If target is in memory and caller wants value in a register instead,
4458 arrange that. Pass TARGET as target for expand_expr so that,
4459 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4460 We know expand_expr will not use the target in that case.
4461 Don't do this if TARGET is volatile because we are supposed
4462 to write it and then read it. */
4464 temp = expand_expr (exp, target, GET_MODE (target),
4465 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4466 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4468 /* If TEMP is already in the desired TARGET, only copy it from
4469 memory and don't store it there again. */
4471 || (rtx_equal_p (temp, target)
4472 && ! side_effects_p (temp) && ! side_effects_p (target)))
4473 dont_store_target = 1;
4474 temp = copy_to_reg (temp);
4476 dont_return_target = 1;
4478 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4479 /* If this is a scalar in a register that is stored in a wider mode
4480 than the declared mode, compute the result into its declared mode
4481 and then convert to the wider mode. Our value is the computed
4484 rtx inner_target = 0;
4486 /* If we don't want a value, we can do the conversion inside EXP,
4487 which will often result in some optimizations. Do the conversion
4488 in two steps: first change the signedness, if needed, then
4489 the extend. But don't do this if the type of EXP is a subtype
4490 of something else since then the conversion might involve
4491 more than just converting modes. */
4492 if ((want_value & 1) == 0
4493 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4494 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4496 if (TREE_UNSIGNED (TREE_TYPE (exp))
4497 != SUBREG_PROMOTED_UNSIGNED_P (target))
4499 ((*lang_hooks.types.signed_or_unsigned_type)
4500 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4502 exp = convert ((*lang_hooks.types.type_for_mode)
4503 (GET_MODE (SUBREG_REG (target)),
4504 SUBREG_PROMOTED_UNSIGNED_P (target)),
4507 inner_target = SUBREG_REG (target);
4510 temp = expand_expr (exp, inner_target, VOIDmode,
4511 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4513 /* If TEMP is a MEM and we want a result value, make the access
4514 now so it gets done only once. Strictly speaking, this is
4515 only necessary if the MEM is volatile, or if the address
4516 overlaps TARGET. But not performing the load twice also
4517 reduces the amount of rtl we generate and then have to CSE. */
4518 if (GET_CODE (temp) == MEM && (want_value & 1) != 0)
4519 temp = copy_to_reg (temp);
4521 /* If TEMP is a VOIDmode constant, use convert_modes to make
4522 sure that we properly convert it. */
4523 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4525 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4526 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4527 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4528 GET_MODE (target), temp,
4529 SUBREG_PROMOTED_UNSIGNED_P (target));
4532 convert_move (SUBREG_REG (target), temp,
4533 SUBREG_PROMOTED_UNSIGNED_P (target));
4535 /* If we promoted a constant, change the mode back down to match
4536 target. Otherwise, the caller might get confused by a result whose
4537 mode is larger than expected. */
4539 if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
4541 if (GET_MODE (temp) != VOIDmode)
4543 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4544 SUBREG_PROMOTED_VAR_P (temp) = 1;
4545 SUBREG_PROMOTED_UNSIGNED_SET (temp,
4546 SUBREG_PROMOTED_UNSIGNED_P (target));
4549 temp = convert_modes (GET_MODE (target),
4550 GET_MODE (SUBREG_REG (target)),
4551 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4554 return want_value & 1 ? temp : NULL_RTX;
4558 temp = expand_expr (exp, target, GET_MODE (target),
4559 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4560 /* Return TARGET if it's a specified hardware register.
4561 If TARGET is a volatile mem ref, either return TARGET
4562 or return a reg copied *from* TARGET; ANSI requires this.
4564 Otherwise, if TEMP is not TARGET, return TEMP
4565 if it is constant (for efficiency),
4566 or if we really want the correct value. */
4567 if (!(target && GET_CODE (target) == REG
4568 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4569 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4570 && ! rtx_equal_p (temp, target)
4571 && (CONSTANT_P (temp) || (want_value & 1) != 0))
4572 dont_return_target = 1;
4575 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4576 the same as that of TARGET, adjust the constant. This is needed, for
4577 example, in case it is a CONST_DOUBLE and we want only a word-sized
4578 value. */
4579 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4580 && TREE_CODE (exp) != ERROR_MARK
4581 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4582 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4583 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4585 /* If value was not generated in the target, store it there.
4586 Convert the value to TARGET's type first if necessary.
4587 If TEMP and TARGET compare equal according to rtx_equal_p, but
4588 one or both of them are volatile memory refs, we have to distinguish
4589 two cases:
4590 - expand_expr has used TARGET. In this case, we must not generate
4591 another copy. This can be detected by TARGET being equal according
4592 to ==.
4593 - expand_expr has not used TARGET - that means that the source just
4594 happens to have the same RTX form. Since temp will have been created
4595 by expand_expr, it will compare unequal according to ==.
4596 We must generate a copy in this case, to reach the correct number
4597 of volatile memory references. */
4599 if ((! rtx_equal_p (temp, target)
4600 || (temp != target && (side_effects_p (temp)
4601 || side_effects_p (target))))
4602 && TREE_CODE (exp) != ERROR_MARK
4603 && ! dont_store_target
4604 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4605 but TARGET is not a valid memory reference, TEMP will differ
4606 from TARGET although it is really the same location. */
4607 && (TREE_CODE_CLASS (TREE_CODE (exp)) != 'd'
4608 || target != DECL_RTL_IF_SET (exp))
4609 /* If there's nothing to copy, don't bother. Don't call expr_size
4610 unless necessary, because some front ends' (C++) expr_size hooks
4611 abort on objects that are not supposed to be bit-copied or
4612 bit-initialized. */
4613 && expr_size (exp) != const0_rtx)
4615 target = protect_from_queue (target, 1);
4616 if (GET_MODE (temp) != GET_MODE (target)
4617 && GET_MODE (temp) != VOIDmode)
4619 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4620 if (dont_return_target)
4622 /* In this case, we will return TEMP,
4623 so make sure it has the proper mode.
4624 But don't forget to store the value into TARGET. */
4625 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4626 emit_move_insn (target, temp);
4629 convert_move (target, temp, unsignedp);
4632 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4634 /* Handle copying a string constant into an array. The string
4635 constant may be shorter than the array. So copy just the string's
4636 actual length, and clear the rest. First get the size of the data
4637 type of the string, which is actually the size of the target. */
4638 rtx size = expr_size (exp);
4640 if (GET_CODE (size) == CONST_INT
4641 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4642 emit_block_move (target, temp, size,
4644 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4647 /* Compute the size of the data to copy from the string. */
4649 = size_binop (MIN_EXPR,
4650 make_tree (sizetype, size),
4651 size_int (TREE_STRING_LENGTH (exp)));
4653 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4655 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4658 /* Copy that much. */
4659 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4660 TREE_UNSIGNED (sizetype));
4661 emit_block_move (target, temp, copy_size_rtx,
4663 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4665 /* Figure out how much is left in TARGET that we have to clear.
4666 Do all calculations in ptr_mode. */
4667 if (GET_CODE (copy_size_rtx) == CONST_INT)
4669 size = plus_constant (size, -INTVAL (copy_size_rtx));
4670 target = adjust_address (target, BLKmode,
4671 INTVAL (copy_size_rtx));
4675 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4676 copy_size_rtx, NULL_RTX, 0,
4679 #ifdef POINTERS_EXTEND_UNSIGNED
4680 if (GET_MODE (copy_size_rtx) != Pmode)
4681 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4682 TREE_UNSIGNED (sizetype));
4685 target = offset_address (target, copy_size_rtx,
4686 highest_pow2_factor (copy_size));
4687 label = gen_label_rtx ();
4688 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4689 GET_MODE (size), 0, label);
4692 if (size != const0_rtx)
4693 clear_storage (target, size);
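/* Illustrative note, added in editing: for C source such as

     char buf[8] = "abc";

   the string constant occupies 4 bytes including the terminating NUL,
   so the code above emits a 4-byte block move and then clears the
   remaining 4 bytes of BUF.  */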
4699 /* Handle calls that return values in multiple non-contiguous locations.
4700 The Irix 6 ABI has examples of this. */
4701 else if (GET_CODE (target) == PARALLEL)
4702 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)));
4703 else if (GET_MODE (temp) == BLKmode)
4704 emit_block_move (target, temp, expr_size (exp),
4706 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4708 emit_move_insn (target, temp);
4711 /* If we don't want a value, return NULL_RTX. */
4712 if ((want_value & 1) == 0)
4715 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4716 ??? The latter test doesn't seem to make sense. */
4717 else if (dont_return_target && GET_CODE (temp) != MEM)
4720 /* Return TARGET itself if it is a hard register. */
4721 else if ((want_value & 1) != 0
4722 && GET_MODE (target) != BLKmode
4723 && ! (GET_CODE (target) == REG
4724 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4725 return copy_to_reg (target);
4731 /* Return 1 if EXP just contains zeros. */
4739 switch (TREE_CODE (exp))
4743 case NON_LVALUE_EXPR:
4744 case VIEW_CONVERT_EXPR:
4745 return is_zeros_p (TREE_OPERAND (exp, 0));
4748 return integer_zerop (exp);
4752 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4755 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4758 for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4759 elt = TREE_CHAIN (elt))
4760 if (!is_zeros_p (TREE_VALUE (elt)))
4766 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4767 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4768 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4769 if (! is_zeros_p (TREE_VALUE (elt)))
4779 /* Return 1 if EXP contains mostly (at least 3/4) zeros. */
4782 mostly_zeros_p (exp)
4785 if (TREE_CODE (exp) == CONSTRUCTOR)
4787 int elts = 0, zeros = 0;
4788 tree elt = CONSTRUCTOR_ELTS (exp);
4789 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4791 /* If there are no ranges of true bits, it is all zero. */
4792 return elt == NULL_TREE;
4794 for (; elt; elt = TREE_CHAIN (elt))
4796 /* We do not handle the case where the index is a RANGE_EXPR,
4797 so the statistic will be somewhat inaccurate.
4798 We do make a more accurate count in store_constructor itself,
4799 and since this function is only used for nested array elements,
4800 this should be close enough. */
4801 if (mostly_zeros_p (TREE_VALUE (elt)))
4806 return 4 * zeros >= 3 * elts;
4809 return is_zeros_p (exp);
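/* Illustrative note, added in editing: for an initializer such as

     int v[4] = { 0, 0, 0, 5 };

   mostly_zeros_p counts 3 zero elements out of 4; 4 * 3 >= 3 * 4
   holds, so it returns 1 and the caller clears the whole object first,
   storing only the nonzero element afterwards.  */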
4812 /* Helper function for store_constructor.
4813 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4814 TYPE is the type of the CONSTRUCTOR, not the element type.
4815 CLEARED is as for store_constructor.
4816 ALIAS_SET is the alias set to use for any stores.
4818 This provides a recursive shortcut back to store_constructor when it isn't
4819 necessary to go through store_field. This is so that we can pass through
4820 the cleared field to let store_constructor know that we may not have to
4821 clear a substructure if the outer structure has already been cleared. */
4824 store_constructor_field (target, bitsize, bitpos, mode, exp, type, cleared,
4827 unsigned HOST_WIDE_INT bitsize;
4828 HOST_WIDE_INT bitpos;
4829 enum machine_mode mode;
4834 if (TREE_CODE (exp) == CONSTRUCTOR
4835 && bitpos % BITS_PER_UNIT == 0
4836 /* If we have a nonzero bitpos for a register target, then we just
4837 let store_field do the bitfield handling. This is unlikely to
4838 generate unnecessary clear instructions anyway. */
4839 && (bitpos == 0 || GET_CODE (target) == MEM))
4841 if (GET_CODE (target) == MEM)
4843 = adjust_address (target,
4844 GET_MODE (target) == BLKmode
4846 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4847 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4850 /* Update the alias set, if required. */
4851 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4852 && MEM_ALIAS_SET (target) != 0)
4854 target = copy_rtx (target);
4855 set_mem_alias_set (target, alias_set);
4858 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4861 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4865 /* Store the value of constructor EXP into the rtx TARGET.
4866 TARGET is either a REG or a MEM; we know it cannot conflict, since
4867 safe_from_p has been called.
4868 CLEARED is true if TARGET is known to have been zeroed.
4869 SIZE is the number of bytes of TARGET we are allowed to modify: this
4870 may not be the same as the size of EXP if we are assigning to a field
4871 which has been packed to exclude padding bits. */
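/* Illustrative note, added in editing: a typical call corresponds to
   C source such as

     struct point { int x, y, z; } p = { 1 };

   The constructor lists fewer fields than the structure has, so the
   code below clears P entirely and then stores 1 into p.x.  */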
4874 store_constructor (exp, target, cleared, size)
4880 tree type = TREE_TYPE (exp);
4881 #ifdef WORD_REGISTER_OPERATIONS
4882 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4885 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4886 || TREE_CODE (type) == QUAL_UNION_TYPE)
4890 /* We either clear the aggregate or indicate the value is dead. */
4891 if ((TREE_CODE (type) == UNION_TYPE
4892 || TREE_CODE (type) == QUAL_UNION_TYPE)
4894 && ! CONSTRUCTOR_ELTS (exp))
4895 /* If the constructor is empty, clear the union. */
4897 clear_storage (target, expr_size (exp));
4901 /* If we are building a static constructor into a register,
4902 set the initial value as zero so we can fold the value into
4903 a constant. But if more than one register is involved,
4904 this probably loses. */
4905 else if (! cleared && GET_CODE (target) == REG && TREE_STATIC (exp)
4906 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4908 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4912 /* If the constructor has fewer fields than the structure
4913 or if we are initializing the structure to mostly zeros,
4914 clear the whole structure first. Don't do this if TARGET is a
4915 register whose mode size isn't equal to SIZE since clear_storage
4916 can't handle this case. */
4917 else if (! cleared && size > 0
4918 && ((list_length (CONSTRUCTOR_ELTS (exp))
4919 != fields_length (type))
4920 || mostly_zeros_p (exp))
4921 && (GET_CODE (target) != REG
4922 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4925 rtx xtarget = target;
4927 if (readonly_fields_p (type))
4929 xtarget = copy_rtx (xtarget);
4930 RTX_UNCHANGING_P (xtarget) = 1;
4933 clear_storage (xtarget, GEN_INT (size));
4938 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4940 /* Store each element of the constructor into
4941 the corresponding field of TARGET. */
4943 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4945 tree field = TREE_PURPOSE (elt);
4946 tree value = TREE_VALUE (elt);
4947 enum machine_mode mode;
4948 HOST_WIDE_INT bitsize;
4949 HOST_WIDE_INT bitpos = 0;
4951 rtx to_rtx = target;
4953 /* Just ignore missing fields.
4954 We cleared the whole structure, above,
4955 if any fields are missing. */
4959 if (cleared && is_zeros_p (value))
4962 if (host_integerp (DECL_SIZE (field), 1))
4963 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4967 mode = DECL_MODE (field);
4968 if (DECL_BIT_FIELD (field))
4971 offset = DECL_FIELD_OFFSET (field);
4972 if (host_integerp (offset, 0)
4973 && host_integerp (bit_position (field), 0))
4975 bitpos = int_bit_position (field);
4979 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4985 if (contains_placeholder_p (offset))
4986 offset = build (WITH_RECORD_EXPR, sizetype,
4987 offset, make_tree (TREE_TYPE (exp), target));
4989 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4990 if (GET_CODE (to_rtx) != MEM)
4993 #ifdef POINTERS_EXTEND_UNSIGNED
4994 if (GET_MODE (offset_rtx) != Pmode)
4995 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4997 if (GET_MODE (offset_rtx) != ptr_mode)
4998 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
5001 to_rtx = offset_address (to_rtx, offset_rtx,
5002 highest_pow2_factor (offset));
5005 if (TREE_READONLY (field))
5007 if (GET_CODE (to_rtx) == MEM)
5008 to_rtx = copy_rtx (to_rtx);
5010 RTX_UNCHANGING_P (to_rtx) = 1;
5013 #ifdef WORD_REGISTER_OPERATIONS
5014 /* If this initializes a field that is smaller than a word, at the
5015 start of a word, try to widen it to a full word.
5016 This special case allows us to output C++ member function
5017 initializations in a form that the optimizers can understand. */
5018 if (GET_CODE (target) == REG
5019 && bitsize < BITS_PER_WORD
5020 && bitpos % BITS_PER_WORD == 0
5021 && GET_MODE_CLASS (mode) == MODE_INT
5022 && TREE_CODE (value) == INTEGER_CST
5024 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5026 tree type = TREE_TYPE (value);
5028 if (TYPE_PRECISION (type) < BITS_PER_WORD)
5030 type = (*lang_hooks.types.type_for_size)
5031 (BITS_PER_WORD, TREE_UNSIGNED (type));
5032 value = convert (type, value);
5035 if (BYTES_BIG_ENDIAN)
5037 = fold (build (LSHIFT_EXPR, type, value,
5038 build_int_2 (BITS_PER_WORD - bitsize, 0)));
5039 bitsize = BITS_PER_WORD;
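/* Illustrative note, added in editing: on a hypothetical 32-bit
   big-endian WORD_REGISTER_OPERATIONS target, initializing

     struct { short a, b; } s = { 3, 0 };

   with S in a register widens the 16-bit store of 3 into a full-word
   store of 3 << 16, a form the optimizers handle more easily.  */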
5044 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5045 && DECL_NONADDRESSABLE_P (field))
5047 to_rtx = copy_rtx (to_rtx);
5048 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5051 store_constructor_field (to_rtx, bitsize, bitpos, mode,
5052 value, type, cleared,
5053 get_alias_set (TREE_TYPE (field)));
5056 else if (TREE_CODE (type) == ARRAY_TYPE
5057 || TREE_CODE (type) == VECTOR_TYPE)
5062 tree domain = TYPE_DOMAIN (type);
5063 tree elttype = TREE_TYPE (type);
5065 HOST_WIDE_INT minelt = 0;
5066 HOST_WIDE_INT maxelt = 0;
5068 /* Vectors are like arrays, but the domain is stored via an array
5069 type indirectly. */
5070 if (TREE_CODE (type) == VECTOR_TYPE)
5072 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
5073 the same field as TYPE_DOMAIN, we are not guaranteed that
5074 it always will. */
5075 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
5076 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
5079 const_bounds_p = (TYPE_MIN_VALUE (domain)
5080 && TYPE_MAX_VALUE (domain)
5081 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5082 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5084 /* If we have constant bounds for the range of the type, get them. */
5087 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5088 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5091 /* If the constructor has fewer elements than the array,
5092 clear the whole array first. Similarly if this is
5093 a static constructor of a non-BLKmode object. */
5094 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
5098 HOST_WIDE_INT count = 0, zero_count = 0;
5099 need_to_clear = ! const_bounds_p;
5101 /* This loop is a more accurate version of the loop in
5102 mostly_zeros_p (it handles RANGE_EXPR in an index).
5103 It is also needed to check for missing elements. */
5104 for (elt = CONSTRUCTOR_ELTS (exp);
5105 elt != NULL_TREE && ! need_to_clear;
5106 elt = TREE_CHAIN (elt))
5108 tree index = TREE_PURPOSE (elt);
5109 HOST_WIDE_INT this_node_count;
5111 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5113 tree lo_index = TREE_OPERAND (index, 0);
5114 tree hi_index = TREE_OPERAND (index, 1);
5116 if (! host_integerp (lo_index, 1)
5117 || ! host_integerp (hi_index, 1))
5123 this_node_count = (tree_low_cst (hi_index, 1)
5124 - tree_low_cst (lo_index, 1) + 1);
5127 this_node_count = 1;
5129 count += this_node_count;
5130 if (mostly_zeros_p (TREE_VALUE (elt)))
5131 zero_count += this_node_count;
5134 /* Clear the entire array first if there are any missing elements,
5135 or if the incidence of zero elements is >= 75%. */
5137 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
5141 if (need_to_clear && size > 0)
5146 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5148 clear_storage (target, GEN_INT (size));
5152 else if (REG_P (target))
5153 /* Inform later passes that the old value is dead. */
5154 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5156 /* Store each element of the constructor into
5157 the corresponding element of TARGET, determined
5158 by counting the elements. */
5159 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
5161 elt = TREE_CHAIN (elt), i++)
5163 enum machine_mode mode;
5164 HOST_WIDE_INT bitsize;
5165 HOST_WIDE_INT bitpos;
5167 tree value = TREE_VALUE (elt);
5168 tree index = TREE_PURPOSE (elt);
5169 rtx xtarget = target;
5171 if (cleared && is_zeros_p (value))
5174 unsignedp = TREE_UNSIGNED (elttype);
5175 mode = TYPE_MODE (elttype);
5176 if (mode == BLKmode)
5177 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5178 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5181 bitsize = GET_MODE_BITSIZE (mode);
5183 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5185 tree lo_index = TREE_OPERAND (index, 0);
5186 tree hi_index = TREE_OPERAND (index, 1);
5187 rtx index_r, pos_rtx, loop_end;
5188 struct nesting *loop;
5189 HOST_WIDE_INT lo, hi, count;
5192 /* If the range is constant and "small", unroll the loop. */
5194 && host_integerp (lo_index, 0)
5195 && host_integerp (hi_index, 0)
5196 && (lo = tree_low_cst (lo_index, 0),
5197 hi = tree_low_cst (hi_index, 0),
5198 count = hi - lo + 1,
5199 (GET_CODE (target) != MEM
5201 || (host_integerp (TYPE_SIZE (elttype), 1)
5202 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5205 lo -= minelt; hi -= minelt;
5206 for (; lo <= hi; lo++)
5208 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5210 if (GET_CODE (target) == MEM
5211 && !MEM_KEEP_ALIAS_SET_P (target)
5212 && TREE_CODE (type) == ARRAY_TYPE
5213 && TYPE_NONALIASED_COMPONENT (type))
5215 target = copy_rtx (target);
5216 MEM_KEEP_ALIAS_SET_P (target) = 1;
5219 store_constructor_field
5220 (target, bitsize, bitpos, mode, value, type, cleared,
5221 get_alias_set (elttype));
5226 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
5227 loop_end = gen_label_rtx ();
5229 unsignedp = TREE_UNSIGNED (domain);
5231 index = build_decl (VAR_DECL, NULL_TREE, domain);
5234 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5236 SET_DECL_RTL (index, index_r);
5237 if (TREE_CODE (value) == SAVE_EXPR
5238 && SAVE_EXPR_RTL (value) == 0)
5240 /* Make sure value gets expanded once before the
5241 loop. */
5242 expand_expr (value, const0_rtx, VOIDmode, 0);
5245 store_expr (lo_index, index_r, 0);
5246 loop = expand_start_loop (0);
5248 /* Assign value to element index. */
5250 = convert (ssizetype,
5251 fold (build (MINUS_EXPR, TREE_TYPE (index),
5252 index, TYPE_MIN_VALUE (domain))));
5253 position = size_binop (MULT_EXPR, position,
5255 TYPE_SIZE_UNIT (elttype)));
5257 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
5258 xtarget = offset_address (target, pos_rtx,
5259 highest_pow2_factor (position));
5260 xtarget = adjust_address (xtarget, mode, 0);
5261 if (TREE_CODE (value) == CONSTRUCTOR)
5262 store_constructor (value, xtarget, cleared,
5263 bitsize / BITS_PER_UNIT);
5265 store_expr (value, xtarget, 0);
5267 expand_exit_loop_if_false (loop,
5268 build (LT_EXPR, integer_type_node,
5271 expand_increment (build (PREINCREMENT_EXPR,
5273 index, integer_one_node), 0, 0);
5275 emit_label (loop_end);
5278 else if ((index != 0 && ! host_integerp (index, 0))
5279 || ! host_integerp (TYPE_SIZE (elttype), 1))
5284 index = ssize_int (1);
5287 index = convert (ssizetype,
5288 fold (build (MINUS_EXPR, index,
5289 TYPE_MIN_VALUE (domain))));
5291 position = size_binop (MULT_EXPR, index,
5293 TYPE_SIZE_UNIT (elttype)));
5294 xtarget = offset_address (target,
5295 expand_expr (position, 0, VOIDmode, 0),
5296 highest_pow2_factor (position));
5297 xtarget = adjust_address (xtarget, mode, 0);
5298 store_expr (value, xtarget, 0);
5303 bitpos = ((tree_low_cst (index, 0) - minelt)
5304 * tree_low_cst (TYPE_SIZE (elttype), 1));
5306 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5308 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
5309 && TREE_CODE (type) == ARRAY_TYPE
5310 && TYPE_NONALIASED_COMPONENT (type))
5312 target = copy_rtx (target);
5313 MEM_KEEP_ALIAS_SET_P (target) = 1;
5316 store_constructor_field (target, bitsize, bitpos, mode, value,
5317 type, cleared, get_alias_set (elttype));
5323 /* Set constructor assignments. */
5324 else if (TREE_CODE (type) == SET_TYPE)
5326 tree elt = CONSTRUCTOR_ELTS (exp);
5327 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
5328 tree domain = TYPE_DOMAIN (type);
5329 tree domain_min, domain_max, bitlength;
5331 /* The default implementation strategy is to extract the constant
5332 parts of the constructor, use that to initialize the target,
5333 and then "or" in whatever non-constant ranges we need in addition.
5335 If a large set is all zero or all ones, it is
5336 probably better to set it using memset (if available) or bzero.
5337 Also, if a large set has just a single range, it may be
5338 better to first clear the whole set (using
5339 bzero/memset) and then set the bits we want. */
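/* Illustrative note, added in editing: for a Pascal-style set
   constructor roughly like [2, 5..7], the constant bits 2, 5, 6 and 7
   are assembled into the word 0xe4 and stored directly; a non-constant
   range such as [lo..hi] is left to the setbits library call further
   below.  */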
5341 /* Check for all zeros. */
5342 if (elt == NULL_TREE && size > 0)
5345 clear_storage (target, GEN_INT (size));
5349 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
5350 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
5351 bitlength = size_binop (PLUS_EXPR,
5352 size_diffop (domain_max, domain_min),
5355 nbits = tree_low_cst (bitlength, 1);
5357 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
5358 are "complicated" (more than one range), initialize (the
5359 constant parts) by copying from a constant. */
5360 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
5361 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
5363 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
5364 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
5365 char *bit_buffer = (char *) alloca (nbits);
5366 HOST_WIDE_INT word = 0;
5367 unsigned int bit_pos = 0;
5368 unsigned int ibit = 0;
5369 unsigned int offset = 0; /* In bytes from beginning of set. */
5371 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
5374 if (bit_buffer[ibit])
5376 if (BYTES_BIG_ENDIAN)
5377 word |= (1 << (set_word_size - 1 - bit_pos));
5379 word |= 1 << bit_pos;
5383 if (bit_pos >= set_word_size || ibit == nbits)
5385 if (word != 0 || ! cleared)
5387 rtx datum = GEN_INT (word);
5390 /* The assumption here is that it is safe to use
5391 XEXP if the set is multi-word, but not if
5392 it's single-word. */
5393 if (GET_CODE (target) == MEM)
5394 to_rtx = adjust_address (target, mode, offset);
5395 else if (offset == 0)
5399 emit_move_insn (to_rtx, datum);
5406 offset += set_word_size / BITS_PER_UNIT;
5411 /* Don't bother clearing storage if the set is all ones. */
5412 if (TREE_CHAIN (elt) != NULL_TREE
5413 || (TREE_PURPOSE (elt) == NULL_TREE
5414 ? nbits != 1
5415 : ( ! host_integerp (TREE_VALUE (elt), 0)
5416 || ! host_integerp (TREE_PURPOSE (elt), 0)
5417 || (tree_low_cst (TREE_VALUE (elt), 0)
5418 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5419 != (HOST_WIDE_INT) nbits))))
5420 clear_storage (target, expr_size (exp));
5422 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5424 /* Start of range of element or NULL. */
5425 tree startbit = TREE_PURPOSE (elt);
5426 /* End of range of element, or element value. */
5427 tree endbit = TREE_VALUE (elt);
5428 HOST_WIDE_INT startb, endb;
5429 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5431 bitlength_rtx = expand_expr (bitlength,
5432 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5434 /* Handle non-range tuple element like [ expr ]. */
5435 if (startbit == NULL_TREE)
5437 startbit = save_expr (endbit);
5441 startbit = convert (sizetype, startbit);
5442 endbit = convert (sizetype, endbit);
5443 if (! integer_zerop (domain_min))
5445 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5446 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5448 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5449 EXPAND_CONST_ADDRESS);
5450 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5451 EXPAND_CONST_ADDRESS);
5457 ((build_qualified_type ((*lang_hooks.types.type_for_mode)
5458 (GET_MODE (target), 0),
5461 emit_move_insn (targetx, target);
5464 else if (GET_CODE (target) == MEM)
5469 /* Optimization: If startbit and endbit are constants divisible
5470 by BITS_PER_UNIT, call memset instead. */
5471 if (TARGET_MEM_FUNCTIONS
5472 && TREE_CODE (startbit) == INTEGER_CST
5473 && TREE_CODE (endbit) == INTEGER_CST
5474 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5475 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5477 emit_library_call (memset_libfunc, LCT_NORMAL,
5479 plus_constant (XEXP (targetx, 0),
5480 startb / BITS_PER_UNIT),
5482 constm1_rtx, TYPE_MODE (integer_type_node),
5483 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5484 TYPE_MODE (sizetype));
5487 emit_library_call (setbits_libfunc, LCT_NORMAL,
5488 VOIDmode, 4, XEXP (targetx, 0),
5489 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5490 startbit_rtx, TYPE_MODE (sizetype),
5491 endbit_rtx, TYPE_MODE (sizetype));
5494 emit_move_insn (target, targetx);
5502 /* Store the value of EXP (an expression tree)
5503 into a subfield of TARGET which has mode MODE and occupies
5504 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5505 If MODE is VOIDmode, it means that we are storing into a bit-field.
5507 If VALUE_MODE is VOIDmode, return nothing in particular.
5508 UNSIGNEDP is not used in this case.
5510 Otherwise, return an rtx for the value stored. This rtx
5511 has mode VALUE_MODE if that is convenient to do.
5512 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5514 TYPE is the type of the underlying object,
5516 ALIAS_SET is the alias set for the destination. This value will
5517 (in general) be different from that for TARGET, since TARGET is a
5518 reference to the containing structure. */
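/* Illustrative note, added in editing: for C source such as

     struct s { unsigned a : 3, b : 5; } x;
     x.b = 9;

   store_field is called with MODE == VOIDmode (a bit-field store),
   BITSIZE == 5 and, on a typical little-endian layout, BITPOS == 3,
   so the value is stored via store_bit_field below.  */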
5521 store_field (target, bitsize, bitpos, mode, exp, value_mode, unsignedp, type,
5524 HOST_WIDE_INT bitsize;
5525 HOST_WIDE_INT bitpos;
5526 enum machine_mode mode;
5528 enum machine_mode value_mode;
5533 HOST_WIDE_INT width_mask = 0;
5535 if (TREE_CODE (exp) == ERROR_MARK)
5538 /* If we have nothing to store, do nothing unless the expression has
5539 side-effects. */
5540 if (bitsize == 0)
5541 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5542 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5543 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5545 /* If we are storing into an unaligned field of an aligned union that is
5546 in a register, we may have the mode of TARGET being an integer mode but
5547 MODE == BLKmode. In that case, get an aligned object whose size and
5548 alignment are the same as TARGET and store TARGET into it (we can avoid
5549 the store if the field being stored is the entire width of TARGET). Then
5550 call ourselves recursively to store the field into a BLKmode version of
5551 that object. Finally, load from the object into TARGET. This is not
5552 very efficient in general, but should only be slightly more expensive
5553 than the otherwise-required unaligned accesses. Perhaps this can be
5554 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5555 twice, once with emit_move_insn and once via store_field. */
5558 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5560 rtx object = assign_temp (type, 0, 1, 1);
5561 rtx blk_object = adjust_address (object, BLKmode, 0);
5563 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5564 emit_move_insn (object, target);
5566 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5569 emit_move_insn (target, object);
5571 /* We want to return the BLKmode version of the data. */
5575 if (GET_CODE (target) == CONCAT)
5577 /* We're storing into a struct containing a single __complex. */
5581 return store_expr (exp, target, 0);
5584 /* If the structure is in a register or if the component
5585 is a bit field, we cannot use addressing to access it.
5586 Use bit-field techniques or SUBREG to store in it. */
5588 if (mode == VOIDmode
5589 || (mode != BLKmode && ! direct_store[(int) mode]
5590 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5591 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5592 || GET_CODE (target) == REG
5593 || GET_CODE (target) == SUBREG
5594 /* If the field isn't aligned enough to store as an ordinary memref,
5595 store it as a bit field. */
5597 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5598 || bitpos % GET_MODE_ALIGNMENT (mode))
5599 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5600 || (bitpos % BITS_PER_UNIT != 0)))
5601 /* If the RHS and field are a constant size and the size of the
5602 RHS isn't the same size as the bitfield, we must use bitfield
5603 operations. */
5605 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5606 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5608 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5610 /* If BITSIZE is narrower than the size of the type of EXP
5611 we will be narrowing TEMP. Normally, what's wanted are the
5612 low-order bits. However, if EXP's type is a record and this is a
5613 big-endian machine, we want the upper BITSIZE bits. */
5614 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5615 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5616 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5617 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5618 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5622 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5623 MODE. */
5624 if (mode != VOIDmode && mode != BLKmode
5625 && mode != TYPE_MODE (TREE_TYPE (exp)))
5626 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5628 /* If the modes of TARGET and TEMP are both BLKmode, both
5629 must be in memory and BITPOS must be aligned on a byte
5630 boundary. If so, we simply do a block copy. */
5631 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5633 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5634 || bitpos % BITS_PER_UNIT != 0)
5637 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5638 emit_block_move (target, temp,
5639 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5643 return value_mode == VOIDmode ? const0_rtx : target;
5646 /* Store the value in the bitfield. */
5647 store_bit_field (target, bitsize, bitpos, mode, temp,
5648 int_size_in_bytes (type));
5650 if (value_mode != VOIDmode)
5652 /* The caller wants an rtx for the value.
5653 If possible, avoid refetching from the bitfield itself. */
5655 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5658 enum machine_mode tmode;
5660 tmode = GET_MODE (temp);
5661 if (tmode == VOIDmode)
5665 return expand_and (tmode, temp,
5666 gen_int_mode (width_mask, tmode),
5669 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5670 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5671 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5674 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5675 NULL_RTX, value_mode, VOIDmode,
5676 int_size_in_bytes (type));
5682 rtx addr = XEXP (target, 0);
5683 rtx to_rtx = target;
5685 /* If a value is wanted, it must be the lhs;
5686 so make the address stable for multiple use. */
5688 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5689 && ! CONSTANT_ADDRESS_P (addr)
5690 /* A frame-pointer reference is already stable. */
5691 && ! (GET_CODE (addr) == PLUS
5692 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5693 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5694 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5695 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5697 /* Now build a reference to just the desired component. */
5699 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5701 if (to_rtx == target)
5702 to_rtx = copy_rtx (to_rtx);
5704 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5705 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5706 set_mem_alias_set (to_rtx, alias_set);
5708 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5712 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5713 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5714 codes and find the ultimate containing object, which we return.
5716 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5717 bit position, and *PUNSIGNEDP to the signedness of the field.
5718 If the position of the field is variable, we store a tree
5719 giving the variable offset (in units) in *POFFSET.
5720 This offset is in addition to the bit position.
5721 If the position is not variable, we store 0 in *POFFSET.
5723 If any of the extraction expressions is volatile,
5724 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5726 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5727 is a mode that can be used to access the field. In that case, *PBITSIZE
5728 is redundant with the size of MODE.
5730 If the field describes a variable-sized object, *PMODE is set to
5731 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5732 this case, but the address of the object can be found. */
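/* Illustrative note, added in editing, assuming 32-bit ints: for a
   reference such as

     struct s { int pad; int f[10]; } *p;
     ... p->f[i] ...

   this function returns the innermost object *P, sets *PBITSIZE to 32
   and *PBITPOS to 32 (the position of field F), and stores a tree for
   i * 4 in *POFFSET.  */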
5735 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5736 punsignedp, pvolatilep)
5738 HOST_WIDE_INT *pbitsize;
5739 HOST_WIDE_INT *pbitpos;
5741 enum machine_mode *pmode;
5746 enum machine_mode mode = VOIDmode;
5747 tree offset = size_zero_node;
5748 tree bit_offset = bitsize_zero_node;
5749 tree placeholder_ptr = 0;
5752 /* First get the mode, signedness, and size. We do this from just the
5753 outermost expression. */
5754 if (TREE_CODE (exp) == COMPONENT_REF)
5756 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5757 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5758 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5760 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5762 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5764 size_tree = TREE_OPERAND (exp, 1);
5765 *punsignedp = TREE_UNSIGNED (exp);
5769 mode = TYPE_MODE (TREE_TYPE (exp));
5770 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5772 if (mode == BLKmode)
5773 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5775 *pbitsize = GET_MODE_BITSIZE (mode);
5780 if (! host_integerp (size_tree, 1))
5781 mode = BLKmode, *pbitsize = -1;
5783 *pbitsize = tree_low_cst (size_tree, 1);
5786 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5787 and find the ultimate containing object. */
5790 if (TREE_CODE (exp) == BIT_FIELD_REF)
5791 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5792 else if (TREE_CODE (exp) == COMPONENT_REF)
5794 tree field = TREE_OPERAND (exp, 1);
5795 tree this_offset = DECL_FIELD_OFFSET (field);
5797 /* If this field hasn't been filled in yet, don't go
5798 past it. This should only happen when folding expressions
5799 made during type construction. */
5800 if (this_offset == 0)
5802 else if (! TREE_CONSTANT (this_offset)
5803 && contains_placeholder_p (this_offset))
5804 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5806 offset = size_binop (PLUS_EXPR, offset, this_offset);
5807 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5808 DECL_FIELD_BIT_OFFSET (field));
5810 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5813 else if (TREE_CODE (exp) == ARRAY_REF
5814 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5816 tree index = TREE_OPERAND (exp, 1);
5817 tree array = TREE_OPERAND (exp, 0);
5818 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5819 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5820 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5822 /* We assume all arrays have sizes that are a multiple of a byte.
5823 First subtract the lower bound, if any, in the type of the
5824 index, then convert to sizetype and multiply by the size of the
5825 element. */
5826 if (low_bound != 0 && ! integer_zerop (low_bound))
5827 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5830 /* If the index has a self-referential type, pass it to a
5831 WITH_RECORD_EXPR; if the component size does, pass our
5832 component to one. */
5833 if (! TREE_CONSTANT (index)
5834 && contains_placeholder_p (index))
5835 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5836 if (! TREE_CONSTANT (unit_size)
5837 && contains_placeholder_p (unit_size))
5838 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5840 offset = size_binop (PLUS_EXPR, offset,
5841 size_binop (MULT_EXPR,
5842 convert (sizetype, index),
5846 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5848 tree new = find_placeholder (exp, &placeholder_ptr);
5850 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5851 We might have been called from tree optimization where we
5852 haven't set up an object yet. */
5861 /* We can go inside most conversions: all NON_LVALUE_EXPRs, all normal
5862 conversions that don't change the mode, and all view conversions
5863 except those that need to "step up" the alignment. */
5864 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5865 && ! (TREE_CODE (exp) == VIEW_CONVERT_EXPR
5866 && ! ((TYPE_ALIGN (TREE_TYPE (exp))
5867 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5869 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5870 < BIGGEST_ALIGNMENT)
5871 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5872 || TYPE_ALIGN_OK (TREE_TYPE
5873 (TREE_OPERAND (exp, 0))))))
5874 && ! ((TREE_CODE (exp) == NOP_EXPR
5875 || TREE_CODE (exp) == CONVERT_EXPR)
5876 && (TYPE_MODE (TREE_TYPE (exp))
5877 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5880 /* If any reference in the chain is volatile, the effect is volatile. */
5881 if (TREE_THIS_VOLATILE (exp))
5884 exp = TREE_OPERAND (exp, 0);
5887 /* If OFFSET is constant, see if we can return the whole thing as a
5888 constant bit position. Otherwise, split it up. */
5889 if (host_integerp (offset, 0)
5890 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5892 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5893 && host_integerp (tem, 0))
5894 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5896 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5902 /* Return 1 if T is an expression that get_inner_reference handles. */
5905 handled_component_p (t)
5908 switch (TREE_CODE (t))
5913 case ARRAY_RANGE_REF:
5914 case NON_LVALUE_EXPR:
5915 case VIEW_CONVERT_EXPR:
5920 return (TYPE_MODE (TREE_TYPE (t))
5921 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5928 /* Given an rtx VALUE that may contain additions and multiplications, return
5929 an equivalent value that just refers to a register, memory, or constant.
5930 This is done by generating instructions to perform the arithmetic and
5931 returning a pseudo-register containing the value.
5933 The returned value may be a REG, SUBREG, MEM or constant. */
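/* Illustrative note, added in editing: given a value such as
   (plus:SI (reg:SI 60) (const_int 4)), force_operand emits an add and
   returns a pseudo register holding the sum, so the caller can use the
   result wherever a plain operand is required.  */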
5936 force_operand (value, target)
5940 /* Use subtarget as the target for operand 0 of a binary operation. */
5941 rtx subtarget = get_subtarget (target);
5942 enum rtx_code code = GET_CODE (value);
5944 /* Check for a PIC address load. */
5945 if ((code == PLUS || code == MINUS)
5946 && XEXP (value, 0) == pic_offset_table_rtx
5947 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5948 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5949 || GET_CODE (XEXP (value, 1)) == CONST))
5952 subtarget = gen_reg_rtx (GET_MODE (value));
5953 emit_move_insn (subtarget, value);
5957 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5960 target = gen_reg_rtx (GET_MODE (value));
5961 convert_move (target, force_operand (XEXP (value, 0), NULL),
5962 code == ZERO_EXTEND);
5966 if (GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
5968 op2 = XEXP (value, 1);
5969 if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
5971 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5974 op2 = negate_rtx (GET_MODE (value), op2);
5977 /* Check for an addition with OP2 a constant integer and our first
5978 operand a PLUS of a virtual register and something else. In that
5979 case, we want to emit the sum of the virtual register and the
5980 constant first and then add the other value. This allows virtual
5981 register instantiation to simply modify the constant rather than
5982 creating another one around this addition. */
5983 if (code == PLUS && GET_CODE (op2) == CONST_INT
5984 && GET_CODE (XEXP (value, 0)) == PLUS
5985 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5986 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5987 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5989 rtx temp = expand_simple_binop (GET_MODE (value), code,
5990 XEXP (XEXP (value, 0), 0), op2,
5991 subtarget, 0, OPTAB_LIB_WIDEN);
5992 return expand_simple_binop (GET_MODE (value), code, temp,
5993 force_operand (XEXP (XEXP (value,
5995 target, 0, OPTAB_LIB_WIDEN);
5998 op1 = force_operand (XEXP (value, 0), subtarget);
5999 op2 = force_operand (op2, NULL_RTX);
6003 return expand_mult (GET_MODE (value), op1, op2, target, 1);
6005 if (!INTEGRAL_MODE_P (GET_MODE (value)))
6006 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6007 target, 1, OPTAB_LIB_WIDEN);
6009 return expand_divmod (0,
6010 FLOAT_MODE_P (GET_MODE (value))
6011 ? RDIV_EXPR : TRUNC_DIV_EXPR,
6012 GET_MODE (value), op1, op2, target, 0);
6015 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6019 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
6023 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6027 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6028 target, 0, OPTAB_LIB_WIDEN);
6031 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6032 target, 1, OPTAB_LIB_WIDEN);
6035 if (GET_RTX_CLASS (code) == '1')
6037 op1 = force_operand (XEXP (value, 0), NULL_RTX);
6038 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
6041 #ifdef INSN_SCHEDULING
6042 /* On machines that have insn scheduling, we want all memory references to be
6043 explicit, so we need to deal with such paradoxical SUBREGs. */
6044 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
6045 && (GET_MODE_SIZE (GET_MODE (value))
6046 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
6048 = simplify_gen_subreg (GET_MODE (value),
6049 force_reg (GET_MODE (SUBREG_REG (value)),
6050 force_operand (SUBREG_REG (value),
6052 GET_MODE (SUBREG_REG (value)),
6053 SUBREG_BYTE (value));
6059 /* Subroutine of expand_expr: return nonzero iff there is no way that
6060 EXP can reference X, which is being modified. TOP_P is nonzero if this
6061 call is going to be used to determine whether we need a temporary
6062 for EXP, as opposed to a recursive call to this function.
6064 It is always safe for this routine to return zero since it merely
6065 searches for optimization opportunities. */
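/* Illustrative note, added in editing: when expanding

     struct s { int i, j; } a;
     a = (struct s) { a.j, a.i };

   storing the constructor field by field directly into A would clobber
   a.i before it is read, so safe_from_p returns 0 and the expander
   builds the value in a temporary first.  */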
6068 safe_from_p (x, exp, top_p)
6075 static tree save_expr_list;
6078 /* If EXP has varying size, we MUST use a target since we currently
6079 have no way of allocating temporaries of variable size
6080 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6081 So we assume here that something at a higher level has prevented a
6082 clash. This is somewhat bogus, but the best we can do. Only
6083 do this when X is BLKmode and when we are at the top level. */
6084 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6085 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
6086 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
6087 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
6088 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
6090 && GET_MODE (x) == BLKmode)
6091 /* If X is in the outgoing argument area, it is always safe. */
6092 || (GET_CODE (x) == MEM
6093 && (XEXP (x, 0) == virtual_outgoing_args_rtx
6094 || (GET_CODE (XEXP (x, 0)) == PLUS
6095 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
6098 /* If this is a subreg of a hard register, declare it unsafe; otherwise,
6099 find the underlying pseudo. */
6100 if (GET_CODE (x) == SUBREG)
6103 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6107 /* A SAVE_EXPR might appear many times in the expression passed to the
6108 top-level safe_from_p call, and if it has a complex subexpression,
6109 examining it multiple times could result in a combinatorial explosion.
6110 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
6111 with optimization took about 28 minutes to compile -- even though it was
6112 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
6113 and turn that off when we are done. We keep a list of the SAVE_EXPRs
6114 we have processed. Note that the only test of top_p was above. */
6123 rtn = safe_from_p (x, exp, 0);
6125 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
6126 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
6131 /* Now look at our tree code and possibly recurse. */
6132 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6135 exp_rtl = DECL_RTL_IF_SET (exp);
6142 if (TREE_CODE (exp) == TREE_LIST)
6146 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
6148 exp = TREE_CHAIN (exp);
6151 if (TREE_CODE (exp) != TREE_LIST)
6152 return safe_from_p (x, exp, 0);
6155 else if (TREE_CODE (exp) == ERROR_MARK)
6156 return 1; /* An already-visited SAVE_EXPR? */
6162 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6167 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6171 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6172 the expression. If it is set, we conflict iff we are that rtx or
6173 both are in memory. Otherwise, we check all operands of the
6174 expression recursively. */
6176 switch (TREE_CODE (exp))
6179 /* If the operand is static or we are static, we can't conflict.
6180 Likewise if we don't conflict with the operand at all. */
6181 if (staticp (TREE_OPERAND (exp, 0))
6182 || TREE_STATIC (exp)
6183 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6186 /* Otherwise, the only way this can conflict is if we are taking
6187 the address of a DECL and that address is part of X, which is
6188 very rare. */
6189 exp = TREE_OPERAND (exp, 0);
6192 if (!DECL_RTL_SET_P (exp)
6193 || GET_CODE (DECL_RTL (exp)) != MEM)
6196 exp_rtl = XEXP (DECL_RTL (exp), 0);
6201 if (GET_CODE (x) == MEM
6202 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6203 get_alias_set (exp)))
6208 /* Assume that the call will clobber all hard registers and
6209 all of memory. */
6210 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6211 || GET_CODE (x) == MEM)
6216 /* If a sequence exists, we would have to scan every instruction
6217 in the sequence to see if it was safe. This is probably not
6218 worthwhile. */
6219 if (RTL_EXPR_SEQUENCE (exp))
6222 exp_rtl = RTL_EXPR_RTL (exp);
6225 case WITH_CLEANUP_EXPR:
6226 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
6229 case CLEANUP_POINT_EXPR:
6230 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6233 exp_rtl = SAVE_EXPR_RTL (exp);
6237 /* If we've already scanned this, don't do it again. Otherwise,
6238 show we've scanned it and record for clearing the flag if we're
6239 going on. */
6240 if (TREE_PRIVATE (exp))
6243 TREE_PRIVATE (exp) = 1;
6244 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6246 TREE_PRIVATE (exp) = 0;
6250 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
6254 /* The only operand we look at is operand 1. The rest aren't
6255 part of the expression. */
6256 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
6258 case METHOD_CALL_EXPR:
6259 /* This takes an rtx argument, but shouldn't appear here. */
6266 /* If we have an rtx, we do not need to scan our operands. */
6270 nops = first_rtl_op (TREE_CODE (exp));
6271 for (i = 0; i < nops; i++)
6272 if (TREE_OPERAND (exp, i) != 0
6273 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6276 /* If this is a language-specific tree code, it may require
6277 special handling. */
6278 if ((unsigned int) TREE_CODE (exp)
6279 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
6280 && !(*lang_hooks.safe_from_p) (x, exp))
6284 /* If we have an rtl, find any enclosed object. Then see if we conflict
6285 with it. */
6288 if (GET_CODE (exp_rtl) == SUBREG)
6290 exp_rtl = SUBREG_REG (exp_rtl);
6291 if (GET_CODE (exp_rtl) == REG
6292 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6296 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6297 are memory and they conflict. */
6298 return ! (rtx_equal_p (x, exp_rtl)
6299 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
6300 && true_dependence (exp_rtl, VOIDmode, x,
6301 rtx_addr_varies_p)));
6304 /* If we reach here, it is safe. */
6308 /* Subroutine of expand_expr: return rtx if EXP is a
6309 variable or parameter; else return 0. */
6316 switch (TREE_CODE (exp))
6320 return DECL_RTL (exp);
6326 #ifdef MAX_INTEGER_COMPUTATION_MODE
6329 check_max_integer_computation_mode (exp)
6332 enum tree_code code;
6333 enum machine_mode mode;
6335 /* Strip any NOPs that don't change the mode. */
6337 code = TREE_CODE (exp);
6339 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
6340 if (code == NOP_EXPR
6341 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
6344 /* First check the type of the overall operation. We need only look at
6345 unary, binary and relational operations. */
6346 if (TREE_CODE_CLASS (code) == '1'
6347 || TREE_CODE_CLASS (code) == '2'
6348 || TREE_CODE_CLASS (code) == '<')
6350 mode = TYPE_MODE (TREE_TYPE (exp));
6351 if (GET_MODE_CLASS (mode) == MODE_INT
6352 && mode > MAX_INTEGER_COMPUTATION_MODE)
6353 internal_error ("unsupported wide integer operation");
6356 /* Check operand of a unary op. */
6357 if (TREE_CODE_CLASS (code) == '1')
6359 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6360 if (GET_MODE_CLASS (mode) == MODE_INT
6361 && mode > MAX_INTEGER_COMPUTATION_MODE)
6362 internal_error ("unsupported wide integer operation");
6365 /* Check operands of a binary/comparison op. */
6366 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
6368 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6369 if (GET_MODE_CLASS (mode) == MODE_INT
6370 && mode > MAX_INTEGER_COMPUTATION_MODE)
6371 internal_error ("unsupported wide integer operation");
6373 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
6374 if (GET_MODE_CLASS (mode) == MODE_INT
6375 && mode > MAX_INTEGER_COMPUTATION_MODE)
6376 internal_error ("unsupported wide integer operation");
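  /* Illustrative sketch: with MAX_INTEGER_COMPUTATION_MODE defined as
     SImode, a DImode addition would fail the checks above, since
     DImode compares greater than SImode in the mode ordering.  */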
6381 /* Return the highest power of two that EXP is known to be a multiple of.
6382 This is used in updating alignment of MEMs in array references. */
6384 static unsigned HOST_WIDE_INT
highest_pow2_factor (exp)
     tree exp;
{
6388 unsigned HOST_WIDE_INT c0, c1;
6390 switch (TREE_CODE (exp))
6393 /* We can find the lowest bit that's a one. If the low
6394 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6395 We need to handle this case since we can find it in a COND_EXPR,
	 a MIN_EXPR, or a MAX_EXPR.  If the constant overflows, we have an
	 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
	 later ICE.  */
6399 if (TREE_CONSTANT_OVERFLOW (exp))
6400 return BIGGEST_ALIGNMENT;
	  /* Note: tree_low_cst is intentionally not used here;
	     we don't care about the upper bits.  */
6405 c0 = TREE_INT_CST_LOW (exp);
	  c0 &= -c0;
	  return c0 ? c0 : BIGGEST_ALIGNMENT;
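	  /* Example: for the constant 24 (binary 11000), c0 & -c0
	     isolates the lowest set bit, giving a factor of 8; a
	     constant whose low HOST_BITS_PER_WIDE_INT bits are all
	     zero yields BIGGEST_ALIGNMENT.  */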
6411 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6412 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6413 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6414 return MIN (c0, c1);
    case MULT_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      return c0 * c1;
6421 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6423 if (integer_pow2p (TREE_OPERAND (exp, 1))
6424 && host_integerp (TREE_OPERAND (exp, 1), 1))
6426 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6427 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6428 return MAX (1, c0 / c1);
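	  /* Example: for (i * 8) / 4, the dividend contributes a
	     factor of 8 and the divisor is 4, so the result is
	     MAX (1, 8 / 4) = 2.  */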
6432 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6433 case SAVE_EXPR: case WITH_RECORD_EXPR:
6434 return highest_pow2_factor (TREE_OPERAND (exp, 0));
    case COMPOUND_EXPR:
      return highest_pow2_factor (TREE_OPERAND (exp, 1));
    case COND_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6441 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6442 return MIN (c0, c1);
6451 /* Similar, except that it is known that the expression must be a multiple
6452 of the alignment of TYPE. */
6454 static unsigned HOST_WIDE_INT
highest_pow2_factor_for_type (type, exp)
     tree type;
     tree exp;
{
6459 unsigned HOST_WIDE_INT type_align, factor;
6461 factor = highest_pow2_factor (exp);
6462 type_align = TYPE_ALIGN (type) / BITS_PER_UNIT;
6463 return MAX (factor, type_align);
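  /* Example: if EXP itself only guarantees a factor of 2 but TYPE is
     aligned to 32 bits, type_align is 4 bytes and the result is
     MAX (2, 4) = 4.  */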
6466 /* Return an object on the placeholder list that matches EXP, a
6467 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
6468 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
   tree.def.  If no such object is found, return 0.  If PLIST is nonzero, it
   points to a starting location in the placeholder list (zero meaning the
   start of the list); on return it is updated to point at the list entry
   where the object was found.  */
tree
find_placeholder (exp, plist)
     tree exp;
     tree *plist;
{
6479 tree type = TREE_TYPE (exp);
6480 tree placeholder_expr;
6482 for (placeholder_expr
6483 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
6484 placeholder_expr != 0;
6485 placeholder_expr = TREE_CHAIN (placeholder_expr))
    {
      tree need_type = TYPE_MAIN_VARIANT (type);
      tree elt;
6490 /* Find the outermost reference that is of the type we want. If none,
	 see if any object has a type that is a pointer to the type we
	 want.  */
6493 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6494 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6495 || TREE_CODE (elt) == COND_EXPR)
6496 ? TREE_OPERAND (elt, 1)
6497 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6498 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6499 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6500 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6501 ? TREE_OPERAND (elt, 0) : 0))
6502 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
	  {
	    if (plist)
	      *plist = placeholder_expr;
	    return elt;
	  }
6509 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
	   elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6512 || TREE_CODE (elt) == COND_EXPR)
6513 ? TREE_OPERAND (elt, 1)
6514 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6515 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6516 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6517 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6518 ? TREE_OPERAND (elt, 0) : 0))
6519 if (POINTER_TYPE_P (TREE_TYPE (elt))
	    && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
		== need_type))
	  {
	    if (plist)
	      *plist = placeholder_expr;
	    return build1 (INDIRECT_REF, need_type, elt);
	  }
    }

  return 0;
}
6532 /* expand_expr: generate code for computing expression EXP.
6533 An rtx for the computed value is returned. The value is never null.
6534 In the case of a void EXP, const0_rtx is returned.
6536 The value may be stored in TARGET if TARGET is nonzero.
6537 TARGET is just a suggestion; callers must assume that
6538 the rtx returned may not be the same as TARGET.
6540 If TARGET is CONST0_RTX, it means that the value will be ignored.
6542 If TMODE is not VOIDmode, it suggests generating the
6543 result in mode TMODE. But this is done only when convenient.
   Otherwise, TMODE is ignored and the value is generated in its natural mode.
6545 TMODE is just a suggestion; callers must assume that
6546 the rtx returned may not have mode TMODE.
6548 Note that TARGET may have neither TMODE nor MODE. In that case, it
6549 probably will not be used.
6551 If MODIFIER is EXPAND_SUM then when EXP is an addition
6552 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6553 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6554 products as above, or REG or MEM, or constant.
6555 Ordinarily in such cases we would output mul or add instructions
6556 and then return a pseudo reg containing the sum.
6558 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6559 it also marks a label as absolutely required (it can't be dead).
6560 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6561 This is used for outputting expressions used in initializers.
6563 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6564 with a constant address even if that address is not normally legitimate.
6565 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6567 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6568 a call parameter. Such targets require special care as we haven't yet
6569 marked TARGET so that it's safe from being trashed by libcalls. We
6570 don't want to use TARGET for anything but the final result;
   intermediate values must go elsewhere.  Additionally, calls to
6572 emit_block_move will be flagged with BLOCK_OP_CALL_PARM. */
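/* Illustrative sketch (not from the original sources): expanding
   A[i] under EXPAND_SUM, for a global array A of 4-byte elements,
   may yield an address such as
	(plus (mult (reg i) (const_int 4)) (symbol_ref "A"))
   rather than forcing the sum into a pseudo register.  */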
rtx
expand_expr (exp, target, tmode, modifier)
     tree exp;
     rtx target;
     enum machine_mode tmode;
     enum expand_modifier modifier;
{
6582 tree type = TREE_TYPE (exp);
6583 int unsignedp = TREE_UNSIGNED (type);
6584 enum machine_mode mode;
6585 enum tree_code code = TREE_CODE (exp);
6587 rtx subtarget, original_target;
6591 /* Handle ERROR_MARK before anybody tries to access its type. */
6592 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6594 op0 = CONST0_RTX (tmode);
6600 mode = TYPE_MODE (type);
6601 /* Use subtarget as the target for operand 0 of a binary operation. */
6602 subtarget = get_subtarget (target);
6603 original_target = target;
6604 ignore = (target == const0_rtx
6605 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6606 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6607 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6608 && TREE_CODE (type) == VOID_TYPE));
6610 /* If we are going to ignore this result, we need only do something
6611 if there is a side-effect somewhere in the expression. If there
6612 is, short-circuit the most common cases here. Note that we must
6613 not call expand_expr with anything but const0_rtx in case this
6614 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6618 if (! TREE_SIDE_EFFECTS (exp))
6621 /* Ensure we reference a volatile object even if value is ignored, but
6622 don't do this if all we are doing is taking its address. */
6623 if (TREE_THIS_VOLATILE (exp)
6624 && TREE_CODE (exp) != FUNCTION_DECL
6625 && mode != VOIDmode && mode != BLKmode
6626 && modifier != EXPAND_CONST_ADDRESS)
6628 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6629 if (GET_CODE (temp) == MEM)
6630 temp = copy_to_reg (temp);
6634 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6635 || code == INDIRECT_REF || code == BUFFER_REF)
6636 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6639 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6640 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6642 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6643 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6646 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6647 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
	/* If the second operand has no side effects, just evaluate
	   the first.  */
	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
			    modifier);
6652 else if (code == BIT_FIELD_REF)
6654 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6655 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6656 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6663 #ifdef MAX_INTEGER_COMPUTATION_MODE
6664 /* Only check stuff here if the mode we want is different from the mode
6665 of the expression; if it's the same, check_max_integer_computation_mode
6666 will handle it. Do we really need to check this stuff at all? */
  if (target != 0
      && GET_MODE (target) != mode
6670 && TREE_CODE (exp) != INTEGER_CST
6671 && TREE_CODE (exp) != PARM_DECL
6672 && TREE_CODE (exp) != ARRAY_REF
6673 && TREE_CODE (exp) != ARRAY_RANGE_REF
6674 && TREE_CODE (exp) != COMPONENT_REF
6675 && TREE_CODE (exp) != BIT_FIELD_REF
6676 && TREE_CODE (exp) != INDIRECT_REF
6677 && TREE_CODE (exp) != CALL_EXPR
6678 && TREE_CODE (exp) != VAR_DECL
6679 && TREE_CODE (exp) != RTL_EXPR)
6681 enum machine_mode mode = GET_MODE (target);
6683 if (GET_MODE_CLASS (mode) == MODE_INT
6684 && mode > MAX_INTEGER_COMPUTATION_MODE)
6685 internal_error ("unsupported wide integer operation");
  if (tmode != mode
      && TREE_CODE (exp) != INTEGER_CST
6690 && TREE_CODE (exp) != PARM_DECL
6691 && TREE_CODE (exp) != ARRAY_REF
6692 && TREE_CODE (exp) != ARRAY_RANGE_REF
6693 && TREE_CODE (exp) != COMPONENT_REF
6694 && TREE_CODE (exp) != BIT_FIELD_REF
6695 && TREE_CODE (exp) != INDIRECT_REF
6696 && TREE_CODE (exp) != VAR_DECL
6697 && TREE_CODE (exp) != CALL_EXPR
6698 && TREE_CODE (exp) != RTL_EXPR
6699 && GET_MODE_CLASS (tmode) == MODE_INT
6700 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6701 internal_error ("unsupported wide integer operation");
6703 check_max_integer_computation_mode (exp);
  /* If we will do cse, generate all results into pseudo registers
6707 since 1) that allows cse to find more things
6708 and 2) otherwise cse could produce an insn the machine
6709 cannot support. An exception is a CONSTRUCTOR into a multi-word
     MEM: storing directly into the MEM is much more likely to be efficient.
6711 Another is a CALL_EXPR which must return in memory. */
6713 if (! cse_not_expected && mode != BLKmode && target
6714 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6715 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
6716 && ! (code == CALL_EXPR && aggregate_value_p (exp)))
6723 tree function = decl_function_context (exp);
	/* Labels in containing functions, or labels used from initializers,
	   must be forced.  */
6726 if (modifier == EXPAND_INITIALIZER
6727 || (function != current_function_decl
6728 && function != inline_function_decl
6730 temp = force_label_rtx (exp);
6732 temp = label_rtx (exp);
6734 temp = gen_rtx_MEM (FUNCTION_MODE, gen_rtx_LABEL_REF (Pmode, temp));
6735 if (function != current_function_decl
6736 && function != inline_function_decl && function != 0)
6737 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6742 if (!DECL_RTL_SET_P (exp))
6744 error_with_decl (exp, "prior parameter's size depends on `%s'");
6745 return CONST0_RTX (mode);
6748 /* ... fall through ... */
6751 /* If a static var's type was incomplete when the decl was written,
6752 but the type is complete now, lay out the decl now. */
6753 if (DECL_SIZE (exp) == 0
6754 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6755 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6756 layout_decl (exp, 0);
6758 /* ... fall through ... */
6762 if (DECL_RTL (exp) == 0)
6765 /* Ensure variable marked as used even if it doesn't go through
	     a parser.  If it hasn't been used yet, write out an external
	     definition.  */
6768 if (! TREE_USED (exp))
6770 assemble_external (exp);
6771 TREE_USED (exp) = 1;
6774 /* Show we haven't gotten RTL for this yet. */
6777 /* Handle variables inherited from containing functions. */
6778 context = decl_function_context (exp);
6780 /* We treat inline_function_decl as an alias for the current function
6781 because that is the inline function whose vars, types, etc.
6782 are being merged into the current function.
6783 See expand_inline_function. */
6785 if (context != 0 && context != current_function_decl
6786 && context != inline_function_decl
6787 /* If var is static, we don't need a static chain to access it. */
6788 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6789 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6793 /* Mark as non-local and addressable. */
6794 DECL_NONLOCAL (exp) = 1;
6795 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6797 (*lang_hooks.mark_addressable) (exp);
6798 if (GET_CODE (DECL_RTL (exp)) != MEM)
6800 addr = XEXP (DECL_RTL (exp), 0);
6801 if (GET_CODE (addr) == MEM)
6803 = replace_equiv_address (addr,
6804 fix_lexical_addr (XEXP (addr, 0), exp));
6806 addr = fix_lexical_addr (addr, exp);
6808 temp = replace_equiv_address (DECL_RTL (exp), addr);
6811 /* This is the case of an array whose size is to be determined
	 from its initializer, while the initializer is still being parsed.
	 See expand_decl.  */
6815 else if (GET_CODE (DECL_RTL (exp)) == MEM
6816 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6817 temp = validize_mem (DECL_RTL (exp));
6819 /* If DECL_RTL is memory, we are in the normal case and either
6820 the address is not valid or it is not a register and -fforce-addr
6821 is specified, get the address into a register. */
6823 else if (GET_CODE (DECL_RTL (exp)) == MEM
6824 && modifier != EXPAND_CONST_ADDRESS
6825 && modifier != EXPAND_SUM
6826 && modifier != EXPAND_INITIALIZER
6827 && (! memory_address_p (DECL_MODE (exp),
6828 XEXP (DECL_RTL (exp), 0))
6830 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6831 temp = replace_equiv_address (DECL_RTL (exp),
6832 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6834 /* If we got something, return it. But first, set the alignment
6835 if the address is a register. */
      if (temp != 0)
	{
	  if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
	    mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));

	  return temp;
	}
6844 /* If the mode of DECL_RTL does not match that of the decl, it
6845 must be a promoted value. We return a SUBREG of the wanted mode,
6846 but mark it so that we know that it was already extended. */
6848 if (GET_CODE (DECL_RTL (exp)) == REG
6849 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6851 /* Get the signedness used for this variable. Ensure we get the
6852 same mode we got when the variable was declared. */
6853 if (GET_MODE (DECL_RTL (exp))
6854 != promote_mode (type, DECL_MODE (exp), &unsignedp,
			       (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
	    abort ();
6858 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6859 SUBREG_PROMOTED_VAR_P (temp) = 1;
6860 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6864 return DECL_RTL (exp);
    case INTEGER_CST:
      temp = immed_double_const (TREE_INT_CST_LOW (exp),
6868 TREE_INT_CST_HIGH (exp), mode);
6870 /* ??? If overflow is set, fold will have done an incomplete job,
6871 which can result in (plus xx (const_int 0)), which can get
6872 simplified by validate_replace_rtx during virtual register
6873 instantiation, which can result in unrecognizable insns.
6874 Avoid this by forcing all overflows into registers. */
6875 if (TREE_CONSTANT_OVERFLOW (exp)
6876 && modifier != EXPAND_INITIALIZER)
6877 temp = force_reg (mode, temp);
6882 return const_vector_from_tree (exp);
6885 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6888 /* If optimized, generate immediate CONST_DOUBLE
6889 which will be turned into memory by reload if necessary.
6891 We used to force a register so that loop.c could see it. But
6892 this does not allow gen_* patterns to perform optimizations with
6893 the constants. It also produces two insns in cases like "x = 1.0;".
6894 On most machines, floating-point constants are not permitted in
6895 many insns, so we'd end up copying it to a register in any case.
6897 Now, we do the copying in expand_binop, if appropriate. */
6898 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6899 TYPE_MODE (TREE_TYPE (exp)));
6903 temp = output_constant_def (exp, 1);
6905 /* temp contains a constant address.
6906 On RISC machines where a constant address isn't valid,
6907 make some insns to get that address into a register. */
6908 if (modifier != EXPAND_CONST_ADDRESS
6909 && modifier != EXPAND_INITIALIZER
6910 && modifier != EXPAND_SUM
6911 && (! memory_address_p (mode, XEXP (temp, 0))
6912 || flag_force_addr))
6913 return replace_equiv_address (temp,
6914 copy_rtx (XEXP (temp, 0)));
6917 case EXPR_WITH_FILE_LOCATION:
      {
	rtx to_return;
	location_t saved_loc = input_location;
6921 input_filename = EXPR_WFL_FILENAME (exp);
6922 input_line = EXPR_WFL_LINENO (exp);
6923 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6924 emit_line_note (input_filename, input_line);
6925 /* Possibly avoid switching back and forth here. */
6926 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
	input_location = saved_loc;
	return to_return;
      }
6932 context = decl_function_context (exp);
6934 /* If this SAVE_EXPR was at global context, assume we are an
6935 initialization function and move it into our context. */
      if (context == 0)
	SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6939 /* We treat inline_function_decl as an alias for the current function
6940 because that is the inline function whose vars, types, etc.
6941 are being merged into the current function.
6942 See expand_inline_function. */
      if (context == current_function_decl || context == inline_function_decl)
	context = 0;
6946 /* If this is non-local, handle it. */
6949 /* The following call just exists to abort if the context is
6950 not of a containing function. */
6951 find_function_data (context);
6953 temp = SAVE_EXPR_RTL (exp);
6954 if (temp && GET_CODE (temp) == REG)
6956 put_var_into_stack (exp, /*rescan=*/true);
6957 temp = SAVE_EXPR_RTL (exp);
	  if (temp == 0 || GET_CODE (temp) != MEM)
	    abort ();
	  return
6962 replace_equiv_address (temp,
6963 fix_lexical_addr (XEXP (temp, 0), exp));
6965 if (SAVE_EXPR_RTL (exp) == 0)
6967 if (mode == VOIDmode)
6970 temp = assign_temp (build_qualified_type (type,
6972 | TYPE_QUAL_CONST)),
6975 SAVE_EXPR_RTL (exp) = temp;
6976 if (!optimize && GET_CODE (temp) == REG)
6977 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6980 /* If the mode of TEMP does not match that of the expression, it
6981 must be a promoted value. We pass store_expr a SUBREG of the
	 wanted mode but mark it so that we know that it was already
	 extended.  */
6985 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6987 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6988 promote_mode (type, mode, &unsignedp, 0);
6989 SUBREG_PROMOTED_VAR_P (temp) = 1;
6990 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6993 if (temp == const0_rtx)
6994 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6996 store_expr (TREE_OPERAND (exp, 0), temp,
6997 modifier == EXPAND_STACK_PARM ? 2 : 0);
6999 TREE_USED (exp) = 1;
7002 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
7003 must be a promoted value. We return a SUBREG of the wanted mode,
7004 but mark it so that we know that it was already extended. */
7006 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
7007 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
7009 /* Compute the signedness and make the proper SUBREG. */
7010 promote_mode (type, mode, &unsignedp, 0);
7011 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
7012 SUBREG_PROMOTED_VAR_P (temp) = 1;
7013 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
7017 return SAVE_EXPR_RTL (exp);
7022 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7023 TREE_OPERAND (exp, 0)
7024 = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
7028 case PLACEHOLDER_EXPR:
7030 tree old_list = placeholder_list;
7031 tree placeholder_expr = 0;
7033 exp = find_placeholder (exp, &placeholder_expr);
7037 placeholder_list = TREE_CHAIN (placeholder_expr);
7038 temp = expand_expr (exp, original_target, tmode, modifier);
7039 placeholder_list = old_list;
7043 case WITH_RECORD_EXPR:
7044 /* Put the object on the placeholder list, expand our first operand,
7045 and pop the list. */
7046 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
7048 target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
7050 placeholder_list = TREE_CHAIN (placeholder_list);
7054 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
7055 expand_goto (TREE_OPERAND (exp, 0));
7057 expand_computed_goto (TREE_OPERAND (exp, 0));
7061 expand_exit_loop_if_false (NULL,
7062 invert_truthvalue (TREE_OPERAND (exp, 0)));
7065 case LABELED_BLOCK_EXPR:
7066 if (LABELED_BLOCK_BODY (exp))
7067 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
7068 /* Should perhaps use expand_label, but this is simpler and safer. */
7069 do_pending_stack_adjust ();
7070 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
7073 case EXIT_BLOCK_EXPR:
7074 if (EXIT_BLOCK_RETURN (exp))
7075 sorry ("returned value in block_exit_expr");
7076 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
7081 expand_start_loop (1);
7082 expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
7090 tree vars = TREE_OPERAND (exp, 0);
7092 /* Need to open a binding contour here because
7093 if there are any cleanups they must be contained here. */
7094 expand_start_bindings (2);
7096 /* Mark the corresponding BLOCK for output in its proper place. */
7097 if (TREE_OPERAND (exp, 2) != 0
7098 && ! TREE_USED (TREE_OPERAND (exp, 2)))
7099 (*lang_hooks.decls.insert_block) (TREE_OPERAND (exp, 2));
7101 /* If VARS have not yet been expanded, expand them now. */
7104 if (!DECL_RTL_SET_P (vars))
7106 expand_decl_init (vars);
7107 vars = TREE_CHAIN (vars);
7110 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
7112 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
7118 if (RTL_EXPR_SEQUENCE (exp))
7120 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
7122 emit_insn (RTL_EXPR_SEQUENCE (exp));
7123 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
7125 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
7126 free_temps_for_rtl_expr (exp);
7127 return RTL_EXPR_RTL (exp);
      /* If we don't need the result, just ensure we evaluate any
	 subexpressions.  */
      if (ignore)
	{
	  tree elt;
7136 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
7137 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
7142 /* All elts simple constants => refer to a constant in memory. But
7143 if this is a non-BLKmode mode, let it store a field at a time
7144 since that should make a CONST_INT or CONST_DOUBLE when we
7145 fold. Likewise, if we have a target we can use, it is best to
7146 store directly into the target unless the type is large enough
7147 that memcpy will be used. If we are making an initializer and
7148 all operands are constant, put it in memory as well.
7150 FIXME: Avoid trying to fill vector constructors piece-meal.
7151 Output them with output_constant_def below unless we're sure
7152 they're zeros. This should go away when vector initializers
7153 are treated like VECTOR_CST instead of arrays.
7155 else if ((TREE_STATIC (exp)
7156 && ((mode == BLKmode
7157 && ! (target != 0 && safe_from_p (target, exp, 1)))
7158 || TREE_ADDRESSABLE (exp)
7159 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7160 && (! MOVE_BY_PIECES_P
7161 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7163 && ((TREE_CODE (type) == VECTOR_TYPE
7164 && !is_zeros_p (exp))
7165 || ! mostly_zeros_p (exp)))))
7166 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
7168 rtx constructor = output_constant_def (exp, 1);
7170 if (modifier != EXPAND_CONST_ADDRESS
7171 && modifier != EXPAND_INITIALIZER
7172 && modifier != EXPAND_SUM)
7173 constructor = validize_mem (constructor);
7179 /* Handle calls that pass values in multiple non-contiguous
7180 locations. The Irix 6 ABI has examples of this. */
7181 if (target == 0 || ! safe_from_p (target, exp, 1)
7182 || GET_CODE (target) == PARALLEL
7183 || modifier == EXPAND_STACK_PARM)
7185 = assign_temp (build_qualified_type (type,
7187 | (TREE_READONLY (exp)
7188 * TYPE_QUAL_CONST))),
7189 0, TREE_ADDRESSABLE (exp), 1);
7191 store_constructor (exp, target, 0, int_expr_size (exp));
7197 tree exp1 = TREE_OPERAND (exp, 0);
7199 tree string = string_constant (exp1, &index);
7201 /* Try to optimize reads from const strings. */
7203 && TREE_CODE (string) == STRING_CST
7204 && TREE_CODE (index) == INTEGER_CST
7205 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
7206 && GET_MODE_CLASS (mode) == MODE_INT
7207 && GET_MODE_SIZE (mode) == 1
7208 && modifier != EXPAND_WRITE)
7209 return gen_int_mode (TREE_STRING_POINTER (string)
7210 [TREE_INT_CST_LOW (index)], mode);
7212 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
7213 op0 = memory_address (mode, op0);
7214 temp = gen_rtx_MEM (mode, op0);
7215 set_mem_attributes (temp, exp, 0);
7217 /* If we are writing to this object and its type is a record with
7218 readonly fields, we must mark it as readonly so it will
7219 conflict with readonly references to those fields. */
7220 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
7221 RTX_UNCHANGING_P (temp) = 1;
7227 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
7231 tree array = TREE_OPERAND (exp, 0);
7232 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
7233 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
7234 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
7237 /* Optimize the special-case of a zero lower bound.
7239 We convert the low_bound to sizetype to avoid some problems
7240 with constant folding. (E.g. suppose the lower bound is 1,
7241 and its mode is QI. Without the conversion, (ARRAY
7242 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
7243 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
7245 if (! integer_zerop (low_bound))
7246 index = size_diffop (index, convert (sizetype, low_bound));
7248 /* Fold an expression like: "foo"[2].
7249 This is not done in fold so it won't happen inside &.
7250 Don't fold if this is for wide characters since it's too
7251 difficult to do correctly and this is a very rare case. */
7253 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
7254 && TREE_CODE (array) == STRING_CST
7255 && TREE_CODE (index) == INTEGER_CST
7256 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
7257 && GET_MODE_CLASS (mode) == MODE_INT
7258 && GET_MODE_SIZE (mode) == 1)
7259 return gen_int_mode (TREE_STRING_POINTER (array)
7260 [TREE_INT_CST_LOW (index)], mode);
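	/* E.g., the "foo"[2] case above expands directly to
	   (const_int 111), the character code of 'o', with no memory
	   reference emitted.  */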
7262 /* If this is a constant index into a constant array,
7263 just get the value from the array. Handle both the cases when
7264 we have an explicit constructor and when our operand is a variable
7265 that was declared const. */
7267 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
7268 && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
7269 && TREE_CODE (index) == INTEGER_CST
7270 && 0 > compare_tree_int (index,
7271 list_length (CONSTRUCTOR_ELTS
7272 (TREE_OPERAND (exp, 0)))))
7276 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7277 i = TREE_INT_CST_LOW (index);
7278 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
7282 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
7286 else if (optimize >= 1
7287 && modifier != EXPAND_CONST_ADDRESS
7288 && modifier != EXPAND_INITIALIZER
7289 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7290 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7291 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
7293 if (TREE_CODE (index) == INTEGER_CST)
7295 tree init = DECL_INITIAL (array);
7297 if (TREE_CODE (init) == CONSTRUCTOR)
7301 for (elem = CONSTRUCTOR_ELTS (init);
7303 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
7304 elem = TREE_CHAIN (elem))
7307 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
7308 return expand_expr (fold (TREE_VALUE (elem)), target,
7311 else if (TREE_CODE (init) == STRING_CST
7312 && 0 > compare_tree_int (index,
7313 TREE_STRING_LENGTH (init)))
7315 tree type = TREE_TYPE (TREE_TYPE (init));
7316 enum machine_mode mode = TYPE_MODE (type);
7318 if (GET_MODE_CLASS (mode) == MODE_INT
7319 && GET_MODE_SIZE (mode) == 1)
7320 return gen_int_mode (TREE_STRING_POINTER (init)
7321 [TREE_INT_CST_LOW (index)], mode);
7326 goto normal_inner_ref;
7329 /* If the operand is a CONSTRUCTOR, we can just extract the
7330 appropriate field if it is present. */
7331 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
7335 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
7336 elt = TREE_CHAIN (elt))
7337 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
7338 /* We can normally use the value of the field in the
7339 CONSTRUCTOR. However, if this is a bitfield in
7340 an integral mode that we can fit in a HOST_WIDE_INT,
7341 we must mask only the number of bits in the bitfield,
7342 since this is done implicitly by the constructor. If
7343 the bitfield does not meet either of those conditions,
7344 we can't do this optimization. */
7345 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
7346 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
7348 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
7349 <= HOST_BITS_PER_WIDE_INT))))
7351 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
7352 && modifier == EXPAND_STACK_PARM)
7354 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
7355 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
7357 HOST_WIDE_INT bitsize
7358 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
7359 enum machine_mode imode
7360 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
7362 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
7364 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7365 op0 = expand_and (imode, op0, op1, target);
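		/* Example: for an unsigned 3-bit field, op1 is
		   (1 << 3) - 1 = 7, so the AND keeps only the low
		   three bits; signed fields instead use the shift
		   pair below to sign-extend the value.  */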
	      else
		{
		  tree count
		    = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
7373 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7375 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7383 goto normal_inner_ref;
7386 case ARRAY_RANGE_REF:
7389 enum machine_mode mode1;
7390 HOST_WIDE_INT bitsize, bitpos;
7393 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7394 &mode1, &unsignedp, &volatilep);
7397 /* If we got back the original object, something is wrong. Perhaps
7398 we are evaluating an expression too early. In any event, don't
7399 infinitely recurse. */
7403 /* If TEM's type is a union of variable size, pass TARGET to the inner
	 computation, since it will need a temporary and TARGET is known
	 to be usable as one.  This occurs in unchecked conversion in Ada.  */
7409 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7410 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7412 && modifier != EXPAND_STACK_PARM
7413 ? target : NULL_RTX),
7415 (modifier == EXPAND_INITIALIZER
7416 || modifier == EXPAND_CONST_ADDRESS
7417 || modifier == EXPAND_STACK_PARM)
7418 ? modifier : EXPAND_NORMAL);
7420 /* If this is a constant, put it into a register if it is a
7421 legitimate constant and OFFSET is 0 and memory if it isn't. */
7422 if (CONSTANT_P (op0))
7424 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7425 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7427 op0 = force_reg (mode, op0);
7429 op0 = validize_mem (force_const_mem (mode, op0));
7434 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7437 /* If this object is in a register, put it into memory.
7438 This case can't occur in C, but can in Ada if we have
7439 unchecked conversion of an expression from a scalar type to
7440 an array or record type. */
7441 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7442 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
7444 /* If the operand is a SAVE_EXPR, we can deal with this by
7445 forcing the SAVE_EXPR into memory. */
7446 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7448 put_var_into_stack (TREE_OPERAND (exp, 0),
7450 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7455 = build_qualified_type (TREE_TYPE (tem),
7456 (TYPE_QUALS (TREE_TYPE (tem))
7457 | TYPE_QUAL_CONST));
7458 rtx memloc = assign_temp (nt, 1, 1, 1);
7460 emit_move_insn (memloc, op0);
7465 if (GET_CODE (op0) != MEM)
7468 #ifdef POINTERS_EXTEND_UNSIGNED
7469 if (GET_MODE (offset_rtx) != Pmode)
7470 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7472 if (GET_MODE (offset_rtx) != ptr_mode)
7473 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
	  /* A constant address in OP0 can have VOIDmode; we must not try
	     to call force_reg for that case, so avoid it here.  */
7478 if (GET_CODE (op0) == MEM
7479 && GET_MODE (op0) == BLKmode
7480 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7482 && (bitpos % bitsize) == 0
7483 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7484 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7486 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7490 op0 = offset_address (op0, offset_rtx,
7491 highest_pow2_factor (offset));
7494 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7495 record its alignment as BIGGEST_ALIGNMENT. */
7496 if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
7497 && is_aligning_offset (offset, tem))
7498 set_mem_align (op0, BIGGEST_ALIGNMENT);
7500 /* Don't forget about volatility even if this is a bitfield. */
7501 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7503 if (op0 == orig_op0)
7504 op0 = copy_rtx (op0);
7506 MEM_VOLATILE_P (op0) = 1;
7509 /* The following code doesn't handle CONCAT.
7510 Assume only bitpos == 0 can be used for CONCAT, due to
	 one-element arrays having the same mode as their element.  */
7512 if (GET_CODE (op0) == CONCAT)
7514 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
7519 /* In cases where an aligned union has an unaligned object
7520 as a field, we might be extracting a BLKmode value from
7521 an integer-mode (e.g., SImode) object. Handle this case
7522 by doing the extract into an object as wide as the field
7523 (which we know to be the width of a basic mode), then
7524 storing into memory, and changing the mode to BLKmode. */
7525 if (mode1 == VOIDmode
7526 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7527 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7528 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7529 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7530 && modifier != EXPAND_CONST_ADDRESS
7531 && modifier != EXPAND_INITIALIZER)
7532 /* If the field isn't aligned enough to fetch as a memref,
7533 fetch it as a bit field. */
7534 || (mode1 != BLKmode
7535 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7536 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))
7537 && SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0)))
7538 || (bitpos % BITS_PER_UNIT != 0)))
7539 /* If the type and the field are a constant size and the
7540 size of the type isn't the same size as the bitfield,
7541 we must use bitfield operations. */
7543 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7545 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7548 enum machine_mode ext_mode = mode;
7550 if (ext_mode == BLKmode
7551 && ! (target != 0 && GET_CODE (op0) == MEM
7552 && GET_CODE (target) == MEM
7553 && bitpos % BITS_PER_UNIT == 0))
7554 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7556 if (ext_mode == BLKmode)
7558 /* In this case, BITPOS must start at a byte boundary and
7559 TARGET, if specified, must be a MEM. */
7560 if (GET_CODE (op0) != MEM
7561 || (target != 0 && GET_CODE (target) != MEM)
7562 || bitpos % BITS_PER_UNIT != 0)
7565 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
7567 target = assign_temp (type, 0, 1, 1);
7569 emit_block_move (target, op0,
7570 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7572 (modifier == EXPAND_STACK_PARM
7573 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7578 op0 = validize_mem (op0);
7580 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7581 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7583 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7584 (modifier == EXPAND_STACK_PARM
7585 ? NULL_RTX : target),
7587 int_size_in_bytes (TREE_TYPE (tem)));
7589 /* If the result is a record type and BITSIZE is narrower than
7590 the mode of OP0, an integral mode, and this is a big endian
7591 machine, we must put the field into the high-order bits. */
7592 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7593 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7594 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7595 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7596 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7600 if (mode == BLKmode)
7602 rtx new = assign_temp (build_qualified_type
7603 ((*lang_hooks.types.type_for_mode)
7605 TYPE_QUAL_CONST), 0, 1, 1);
7607 emit_move_insn (new, op0);
7608 op0 = copy_rtx (new);
7609 PUT_MODE (op0, BLKmode);
7610 set_mem_attributes (op0, exp, 1);
      /* If the result is BLKmode, use that to access the object
	 now as well.  */
7618 if (mode == BLKmode)
7621 /* Get a reference to just this component. */
7622 if (modifier == EXPAND_CONST_ADDRESS
7623 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7624 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7626 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7628 if (op0 == orig_op0)
7629 op0 = copy_rtx (op0);
7631 set_mem_attributes (op0, exp, 0);
7632 if (GET_CODE (XEXP (op0, 0)) == REG)
7633 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7635 MEM_VOLATILE_P (op0) |= volatilep;
7636 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7637 || modifier == EXPAND_CONST_ADDRESS
7638 || modifier == EXPAND_INITIALIZER)
7640 else if (target == 0)
7641 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7643 convert_move (target, op0, unsignedp);
7649 rtx insn, before = get_last_insn (), vtbl_ref;
7651 /* Evaluate the interior expression. */
7652 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7655 /* Get or create an instruction off which to hang a note. */
7656 if (REG_P (subtarget))
7659 insn = get_last_insn ();
7662 if (! INSN_P (insn))
7663 insn = prev_nonnote_insn (insn);
7667 target = gen_reg_rtx (GET_MODE (subtarget));
7668 insn = emit_move_insn (target, subtarget);
7671 /* Collect the data for the note. */
7672 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7673 vtbl_ref = plus_constant (vtbl_ref,
7674 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7675 /* Discard the initial CONST that was added. */
7676 vtbl_ref = XEXP (vtbl_ref, 0);
7679 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7684 /* Intended for a reference to a buffer of a file-object in Pascal.
7685 But it's not certain that a special tree code will really be
7686 necessary for these. INDIRECT_REF might work for them. */
7692 /* Pascal set IN expression.
	 Algorithm:
	     rlo       = set_low - (set_low%bits_per_word);
7696 the_word = set [ (index - rlo)/bits_per_word ];
7697 bit_index = index % bits_per_word;
7698 bitmask = 1 << bit_index;
7699 return !!(the_word & bitmask); */
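	 Worked example (illustrative): with bits_per_word == 8,
	 set_low == 3 and index == 10, we get rlo = 3 - (3 % 8) = 0,
	 the_word = set[(10 - 0) / 8] = set[1], bit_index = 10 % 8 = 2,
	 and bitmask = 1 << 2 = 4; the result tests bit 2 of set[1].  */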
7701 tree set = TREE_OPERAND (exp, 0);
7702 tree index = TREE_OPERAND (exp, 1);
7703 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7704 tree set_type = TREE_TYPE (set);
7705 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7706 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7707 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7708 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7709 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7710 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7711 rtx setaddr = XEXP (setval, 0);
7712 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7714 rtx diff, quo, rem, addr, bit, result;
7716 /* If domain is empty, answer is no. Likewise if index is constant
7717 and out of bounds. */
7718 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7719 && TREE_CODE (set_low_bound) == INTEGER_CST
7720 && tree_int_cst_lt (set_high_bound, set_low_bound))
7721 || (TREE_CODE (index) == INTEGER_CST
7722 && TREE_CODE (set_low_bound) == INTEGER_CST
7723 && tree_int_cst_lt (index, set_low_bound))
7724 || (TREE_CODE (set_high_bound) == INTEGER_CST
7725 && TREE_CODE (index) == INTEGER_CST
7726 && tree_int_cst_lt (set_high_bound, index))))
7730 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7732 /* If we get here, we have to generate the code for both cases
7733 (in range and out of range). */
7735 op0 = gen_label_rtx ();
7736 op1 = gen_label_rtx ();
7738 if (! (GET_CODE (index_val) == CONST_INT
7739 && GET_CODE (lo_r) == CONST_INT))
7740 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7741 GET_MODE (index_val), iunsignedp, op1);
7743 if (! (GET_CODE (index_val) == CONST_INT
7744 && GET_CODE (hi_r) == CONST_INT))
7745 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7746 GET_MODE (index_val), iunsignedp, op1);
	/* Calculate the element number of bit zero in the first word
	   of the set.  */
7750 if (GET_CODE (lo_r) == CONST_INT)
7751 rlow = GEN_INT (INTVAL (lo_r)
7752 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7754 rlow = expand_binop (index_mode, and_optab, lo_r,
7755 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7756 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7758 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7759 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7761 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7762 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7763 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7764 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7766 addr = memory_address (byte_mode,
7767 expand_binop (index_mode, add_optab, diff,
7768 setaddr, NULL_RTX, iunsignedp,
7771 /* Extract the bit we want to examine. */
7772 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7773 gen_rtx_MEM (byte_mode, addr),
7774 make_tree (TREE_TYPE (index), rem),
7776 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7777 GET_MODE (target) == byte_mode ? target : 0,
7778 1, OPTAB_LIB_WIDEN);
7780 if (result != target)
7781 convert_move (target, result, 1);
7783 /* Output the code to handle the out-of-range case. */
7786 emit_move_insn (target, const0_rtx);
7791 case WITH_CLEANUP_EXPR:
7792 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7794 WITH_CLEANUP_EXPR_RTL (exp)
7795 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7796 expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
7797 CLEANUP_EH_ONLY (exp));
7799 /* That's it for this cleanup. */
7800 TREE_OPERAND (exp, 1) = 0;
7802 return WITH_CLEANUP_EXPR_RTL (exp);
7804 case CLEANUP_POINT_EXPR:
7806 /* Start a new binding layer that will keep track of all cleanup
7807 actions to be performed. */
7808 expand_start_bindings (2);
7810 target_temp_slot_level = temp_slot_level;
7812 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7813 /* If we're going to use this value, load it up now. */
7815 op0 = force_not_mem (op0);
7816 preserve_temp_slots (op0);
7817 expand_end_bindings (NULL_TREE, 0, 0);
7822 /* Check for a built-in function. */
7823 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7824 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7826 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7828 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7829 == BUILT_IN_FRONTEND)
7830 return (*lang_hooks.expand_expr) (exp, original_target,
7833 return expand_builtin (exp, target, subtarget, tmode, ignore);
7836 return expand_call (exp, target, ignore);
7838 case NON_LVALUE_EXPR:
7841 case REFERENCE_EXPR:
7842 if (TREE_OPERAND (exp, 0) == error_mark_node)
7845 if (TREE_CODE (type) == UNION_TYPE)
7847 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7849 /* If both input and output are BLKmode, this conversion isn't doing
	     anything except possibly changing memory attributes.  */
7851 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7853 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7856 result = copy_rtx (result);
7857 set_mem_attributes (result, exp, 0);
7862 target = assign_temp (type, 0, 1, 1);
7864 if (GET_CODE (target) == MEM)
7865 /* Store data into beginning of memory target. */
7866 store_expr (TREE_OPERAND (exp, 0),
7867 adjust_address (target, TYPE_MODE (valtype), 0),
7868 modifier == EXPAND_STACK_PARM ? 2 : 0);
7870 else if (GET_CODE (target) == REG)
7871 /* Store this field into a union of the proper type. */
7872 store_field (target,
7873 MIN ((int_size_in_bytes (TREE_TYPE
7874 (TREE_OPERAND (exp, 0)))
7876 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7877 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7878 VOIDmode, 0, type, 0);
7882 /* Return the entire union. */
7886 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7888 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7891 /* If the signedness of the conversion differs and OP0 is
7892 a promoted SUBREG, clear that indication since we now
7893 have to do the proper extension. */
7894 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7895 && GET_CODE (op0) == SUBREG)
7896 SUBREG_PROMOTED_VAR_P (op0) = 0;
7901 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7902 if (GET_MODE (op0) == mode)
7905 /* If OP0 is a constant, just convert it into the proper mode. */
7906 if (CONSTANT_P (op0))
7908 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7909 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7911 if (modifier == EXPAND_INITIALIZER)
7912 return simplify_gen_subreg (mode, op0, inner_mode,
7913 subreg_lowpart_offset (mode,
7916 return convert_modes (mode, inner_mode, op0,
7917 TREE_UNSIGNED (inner_type));
7920 if (modifier == EXPAND_INITIALIZER)
7921 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7925 convert_to_mode (mode, op0,
7926 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7928 convert_move (target, op0,
7929 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7932 case VIEW_CONVERT_EXPR:
7933 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7935 /* If the input and output modes are both the same, we are done.
7936 Otherwise, if neither mode is BLKmode and both are integral and within
7937 a word, we can use gen_lowpart. If neither is true, make sure the
7938 operand is in memory and convert the MEM to the new mode. */
7939 if (TYPE_MODE (type) == GET_MODE (op0))
7941 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7942 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7943 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
7944 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7945 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7946 op0 = gen_lowpart (TYPE_MODE (type), op0);
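      /* Minimal sketch (assumed types): viewing an unsigned short as a
	 2-byte struct whose mode is HImode satisfies the test above,
	 since both modes are integral and fit in a word, so gen_lowpart
	 simply relabels the value without going through memory.  */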
7947 else if (GET_CODE (op0) != MEM)
7949 /* If the operand is not a MEM, force it into memory. Since we
	     are going to be changing the mode of the MEM, don't call
7951 force_const_mem for constants because we don't allow pool
7952 constants to change mode. */
7953 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7955 if (TREE_ADDRESSABLE (exp))
7958 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7960 = assign_stack_temp_for_type
7961 (TYPE_MODE (inner_type),
7962 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7964 emit_move_insn (target, op0);
7968 /* At this point, OP0 is in the correct mode. If the output type is such
7969 that the operand is known to be aligned, indicate that it is.
	 Otherwise, we need only be concerned about alignment for non-BLKmode
	 results.  */
7972 if (GET_CODE (op0) == MEM)
7974 op0 = copy_rtx (op0);
7976 if (TYPE_ALIGN_OK (type))
7977 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7978 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7979 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7981 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7982 HOST_WIDE_INT temp_size
7983 = MAX (int_size_in_bytes (inner_type),
7984 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7985 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7986 temp_size, 0, type);
7987 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7989 if (TREE_ADDRESSABLE (exp))
7992 if (GET_MODE (op0) == BLKmode)
7993 emit_block_move (new_with_op0_mode, op0,
7994 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7995 (modifier == EXPAND_STACK_PARM
7996 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7998 emit_move_insn (new_with_op0_mode, op0);
8003 op0 = adjust_address (op0, TYPE_MODE (type), 0);
8009 this_optab = ! unsignedp && flag_trapv
8010 && (GET_MODE_CLASS (mode) == MODE_INT)
8011 ? addv_optab : add_optab;
8013 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
8014 something else, make sure we add the register to the constant and
8015 then to the other thing. This case can occur during strength
8016 reduction and doing it this way will produce better code if the
8017 frame pointer or argument pointer is eliminated.
8019 fold-const.c will ensure that the constant is always in the inner
8020 PLUS_EXPR, so the only case we need to do anything about is if
8021 sp, ap, or fp is our second argument, in which case we must swap
8022 the innermost first argument and our second argument. */
8024 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
8025 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
8026 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
8027 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
8028 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
8029 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
8031 tree t = TREE_OPERAND (exp, 1);
8033 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
8034 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
8037 /* If the result is to be ptr_mode and we are adding an integer to
8038 something, we might be forming a constant. So try to use
8039 plus_constant. If it produces a sum and we can't accept it,
8040 use force_operand. This allows P = &ARR[const] to generate
	 efficient code on machines where a SYMBOL_REF is not a valid
	 address.
8044 If this is an EXPAND_SUM call, always return the sum. */
8045 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8046 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8048 if (modifier == EXPAND_STACK_PARM)
8050 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
8051 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8052 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
8056 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
8058 /* Use immed_double_const to ensure that the constant is
8059 truncated according to the mode of OP1, then sign extended
8060 to a HOST_WIDE_INT. Using the constant directly can result
8061 in non-canonical RTL in a 64x32 cross compile. */
	      constant_part
		= immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
				      (HOST_WIDE_INT) 0,
8065 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
8066 op1 = plus_constant (op1, INTVAL (constant_part));
8067 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8068 op1 = force_operand (op1, target);
8072 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8073 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
8074 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
8078 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8079 (modifier == EXPAND_INITIALIZER
8080 ? EXPAND_INITIALIZER : EXPAND_SUM));
8081 if (! CONSTANT_P (op0))
8083 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8084 VOIDmode, modifier);
8085 /* Don't go to both_summands if modifier
8086 says it's not right to return a PLUS. */
8087 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8091 /* Use immed_double_const to ensure that the constant is
8092 truncated according to the mode of OP1, then sign extended
8093 to a HOST_WIDE_INT. Using the constant directly can result
8094 in non-canonical RTL in a 64x32 cross compile. */
	      constant_part
		= immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
				      (HOST_WIDE_INT) 0,
8098 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
8099 op0 = plus_constant (op0, INTVAL (constant_part));
8100 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8101 op0 = force_operand (op0, target);
8106 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8109 /* No sense saving up arithmetic to be done
8110 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
8113 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8114 || mode != ptr_mode)
8116 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8117 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8118 if (op0 == const0_rtx)
8120 if (op1 == const0_rtx)
8125 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
8126 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
      /* We come here from MINUS_EXPR when the second operand is a
	 constant.  */
8131 /* Make sure any term that's a sum with a constant comes last. */
8132 if (GET_CODE (op0) == PLUS
8133 && CONSTANT_P (XEXP (op0, 1)))
8139 /* If adding to a sum including a constant,
8140 associate it to put the constant outside. */
8141 if (GET_CODE (op1) == PLUS
8142 && CONSTANT_P (XEXP (op1, 1)))
8144 rtx constant_term = const0_rtx;
8146 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
8149 /* Ensure that MULT comes first if there is one. */
8150 else if (GET_CODE (op0) == MULT)
8151 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
8153 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
8155 /* Let's also eliminate constants from op0 if possible. */
8156 op0 = eliminate_constant_term (op0, &constant_term);
8158 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
8159 their sum should be a constant. Form it into OP1, since the
8160 result we want will then be OP0 + OP1. */
8162 temp = simplify_binary_operation (PLUS, mode, constant_term,
8167 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
8170 /* Put a constant term last and put a multiplication first. */
8171 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
8172 temp = op1, op1 = op0, op0 = temp;
8174 temp = simplify_binary_operation (PLUS, mode, op0, op1);
8175 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
8178 /* For initializers, we are allowed to return a MINUS of two
	 symbolic constants.  Here we handle all cases when both operands
	 are constant.  */
8181 /* Handle difference of two symbolic constants,
8182 for the sake of an initializer. */
8183 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8184 && really_constant_p (TREE_OPERAND (exp, 0))
8185 && really_constant_p (TREE_OPERAND (exp, 1)))
8187 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
8189 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
8192 /* If the last operand is a CONST_INT, use plus_constant of
8193 the negated constant. Else make the MINUS. */
8194 if (GET_CODE (op1) == CONST_INT)
8195 return plus_constant (op0, - INTVAL (op1));
8197 return gen_rtx_MINUS (mode, op0, op1);
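	  /* E.g., in a static initializer computing the difference of
	     two symbol addresses: when op1 expands to (const_int 4) the
	     result is plus_constant (op0, -4); when it stays a
	     (symbol_ref ...) the MINUS rtx itself is returned for the
	     assembler to resolve.  */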
8200 this_optab = ! unsignedp && flag_trapv
8201 && (GET_MODE_CLASS(mode) == MODE_INT)
8202 ? subv_optab : sub_optab;
8204 /* No sense saving up arithmetic to be done
8205 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
8208 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8209 || mode != ptr_mode)
8212 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8215 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
8216 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
8218 /* Convert A - const to A + (-const). */
8219 if (GET_CODE (op1) == CONST_INT)
8221 op1 = negate_rtx (mode, op1);
8228 /* If first operand is constant, swap them.
8229 Thus the following special case checks need only
8230 check the second operand. */
8231 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
8233 tree t1 = TREE_OPERAND (exp, 0);
8234 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
8235 TREE_OPERAND (exp, 1) = t1;
8238 /* Attempt to return something suitable for generating an
8239 indexed address, for machines that support that. */
8241 if (modifier == EXPAND_SUM && mode == ptr_mode
8242 && host_integerp (TREE_OPERAND (exp, 1), 0))
8243 {
8244 tree exp1 = TREE_OPERAND (exp, 1);
8246 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8247 EXPAND_SUM);
8249 /* If we knew for certain that this is arithmetic for an array
8250 reference, and we knew the bounds of the array, then we could
8251 apply the distributive law across (PLUS X C) for constant C.
8252 Without such knowledge, we risk overflowing the computation
8253 when both X and C are large, but X+C isn't. */
8254 /* ??? Could perhaps special-case EXP being unsigned and C being
8255 positive. In that case we are certain that X+C is no smaller
8256 than X and so the transformed expression will overflow iff the
8257 original would have. */
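/* For example, when `i * 4' appears inside an address computation,
   returning the bare (mult (reg i) (const_int 4)) lets the enclosing
   PLUS form (plus (reg p) (mult (reg i) (const_int 4))), which many
   targets accept directly as an indexed memory address.  */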
8259 if (GET_CODE (op0) != REG)
8260 op0 = force_operand (op0, NULL_RTX);
8261 if (GET_CODE (op0) != REG)
8262 op0 = copy_to_mode_reg (mode, op0);
8264 return gen_rtx_MULT (mode, op0,
8265 gen_int_mode (tree_low_cst (exp1, 0),
8266 TYPE_MODE (TREE_TYPE (exp1))));
8267 }
8269 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8270 subtarget = 0;
8272 if (modifier == EXPAND_STACK_PARM)
8273 target = 0;
8275 /* Check for multiplying things that have been extended
8276 from a narrower type. If this machine supports multiplying
8277 in that narrower type with a result in the desired type,
8278 do it that way, and avoid the explicit type-conversion. */
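/* For example, with 16-bit shorts and 32-bit ints, `(int) s1 * (int) s2'
   can use a widening 16x16->32 multiply (a mulhisi3-style pattern, where
   the target provides one) instead of extending both operands to SImode
   and performing a full SImode multiply.  */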
8279 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
8280 && TREE_CODE (type) == INTEGER_TYPE
8281 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8282 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8283 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8284 && int_fits_type_p (TREE_OPERAND (exp, 1),
8285 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8286 /* Don't use a widening multiply if a shift will do. */
8287 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8288 > HOST_BITS_PER_WIDE_INT)
8289 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
8290 ||
8291 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8292 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8293 ==
8294 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
8295 /* If both operands are extended, they must either both
8296 be zero-extended or both be sign-extended. */
8297 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8298 ==
8299 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
8300 {
8301 enum machine_mode innermode
8302 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
8303 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8304 ? smul_widen_optab : umul_widen_optab);
8305 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8306 ? umul_widen_optab : smul_widen_optab);
8307 if (mode == GET_MODE_WIDER_MODE (innermode))
8308 {
8309 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
8310 {
8311 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8312 NULL_RTX, VOIDmode, 0);
8313 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8314 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8315 VOIDmode, 0);
8316 else
8317 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8318 NULL_RTX, VOIDmode, 0);
8319 goto binop2;
8320 }
8321 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
8322 && innermode == word_mode)
8323 {
8324 rtx htem;
8325 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8326 NULL_RTX, VOIDmode, 0);
8327 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8328 op1 = convert_modes (innermode, mode,
8329 expand_expr (TREE_OPERAND (exp, 1),
8330 NULL_RTX, VOIDmode, 0),
8331 unsignedp);
8332 else
8333 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8334 NULL_RTX, VOIDmode, 0);
8335 temp = expand_binop (mode, other_optab, op0, op1, target,
8336 unsignedp, OPTAB_LIB_WIDEN);
8337 htem = expand_mult_highpart_adjust (innermode,
8338 gen_highpart (innermode, temp),
8339 op0, op1,
8340 gen_highpart (innermode, temp),
8341 unsignedp);
8342 emit_move_insn (gen_highpart (innermode, temp), htem);
8343 return temp;
8344 }
8345 }
8346 }
8347 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8348 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8349 return expand_mult (mode, op0, op1, target, unsignedp);
8351 case TRUNC_DIV_EXPR:
8352 case FLOOR_DIV_EXPR:
8353 case CEIL_DIV_EXPR:
8354 case ROUND_DIV_EXPR:
8355 case EXACT_DIV_EXPR:
8356 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8357 subtarget = 0;
8358 if (modifier == EXPAND_STACK_PARM)
8359 target = 0;
8360 /* Possible optimization: compute the dividend with EXPAND_SUM
8361 then if the divisor is constant can optimize the case
8362 where some terms of the dividend have coeffs divisible by it. */
8363 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8364 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8365 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8367 case RDIV_EXPR:
8368 /* Emit a/b as a*(1/b). Later we may manage CSE the reciprocal saving
8369 expensive divide. If not, combine will rebuild the original
8370 division. */
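/* For example, `x / y + z / y' becomes `x * (1/y) + z * (1/y)'; CSE can
   then compute the reciprocal once, trading two divides for one divide
   and two multiplies.  This is only done under the unsafe-math test
   below, since it changes rounding.  */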
8371 if (flag_unsafe_math_optimizations && optimize && !optimize_size
8372 && TREE_CODE (type) == REAL_TYPE
8373 && !real_onep (TREE_OPERAND (exp, 0)))
8374 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
8375 build (RDIV_EXPR, type,
8376 build_real (type, dconst1),
8377 TREE_OPERAND (exp, 1))),
8378 target, tmode, modifier);
8379 this_optab = sdiv_optab;
8380 goto binop;
8382 case TRUNC_MOD_EXPR:
8383 case FLOOR_MOD_EXPR:
8384 case CEIL_MOD_EXPR:
8385 case ROUND_MOD_EXPR:
8386 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8387 subtarget = 0;
8388 if (modifier == EXPAND_STACK_PARM)
8389 target = 0;
8390 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8391 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8392 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8394 case FIX_ROUND_EXPR:
8395 case FIX_FLOOR_EXPR:
8396 case FIX_CEIL_EXPR:
8397 abort (); /* Not used for C. */
8399 case FIX_TRUNC_EXPR:
8400 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8401 if (target == 0 || modifier == EXPAND_STACK_PARM)
8402 target = gen_reg_rtx (mode);
8403 expand_fix (target, op0, unsignedp);
8404 return target;
8406 case FLOAT_EXPR:
8407 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8408 if (target == 0 || modifier == EXPAND_STACK_PARM)
8409 target = gen_reg_rtx (mode);
8410 /* expand_float can't figure out what to do if FROM has VOIDmode.
8411 So give it the correct mode. With -O, cse will optimize this. */
8412 if (GET_MODE (op0) == VOIDmode)
8413 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8414 op0);
8415 expand_float (target, op0,
8416 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8417 return target;
8419 case NEGATE_EXPR:
8420 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8421 if (modifier == EXPAND_STACK_PARM)
8422 target = 0;
8423 temp = expand_unop (mode,
8424 ! unsignedp && flag_trapv
8425 && (GET_MODE_CLASS(mode) == MODE_INT)
8426 ? negv_optab : neg_optab, op0, target, 0);
8427 if (temp == 0)
8428 abort ();
8429 return temp;
8431 case ABS_EXPR:
8432 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8433 if (modifier == EXPAND_STACK_PARM)
8434 target = 0;
8436 /* Handle complex values specially. */
8437 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
8438 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
8439 return expand_complex_abs (mode, op0, target, unsignedp);
8441 /* Unsigned abs is simply the operand. Testing here means we don't
8442 risk generating incorrect code below. */
8443 if (TREE_UNSIGNED (type))
8444 return op0;
8446 return expand_abs (mode, op0, target, unsignedp,
8447 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8449 case MAX_EXPR:
8450 case MIN_EXPR:
8451 target = original_target;
8452 if (target == 0
8453 || modifier == EXPAND_STACK_PARM
8454 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
8455 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
8456 || GET_MODE (target) != mode
8457 || (GET_CODE (target) == REG
8458 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8459 target = gen_reg_rtx (mode);
8460 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8461 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8463 /* First try to do it with a special MIN or MAX instruction.
8464 If that does not win, use a conditional jump to select the proper
8465 value. */
8466 this_optab = (TREE_UNSIGNED (type)
8467 ? (code == MIN_EXPR ? umin_optab : umax_optab)
8468 : (code == MIN_EXPR ? smin_optab : smax_optab));
8470 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8471 OPTAB_WIDEN);
8472 if (temp != 0)
8473 return temp;
8475 /* At this point, a MEM target is no longer useful; we will get better
8476 code without it. */
8478 if (GET_CODE (target) == MEM)
8479 target = gen_reg_rtx (mode);
8481 if (target != op0)
8482 emit_move_insn (target, op0);
8484 op0 = gen_label_rtx ();
8486 /* If this mode is an integer too wide to compare properly,
8487 compare word by word. Rely on cse to optimize constant cases. */
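/* For example, a DImode MIN or MAX on a 32-bit target with no DImode
   branch-on-compare is handled by do_jump_by_parts_greater_rtx below,
   which compares the high words first and the low words only when the
   high words are equal.  */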
8488 if (GET_MODE_CLASS (mode) == MODE_INT
8489 && ! can_compare_p (GE, mode, ccp_jump))
8490 {
8491 if (code == MAX_EXPR)
8492 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8493 target, op1, NULL_RTX, op0);
8494 else
8495 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8496 op1, target, NULL_RTX, op0);
8497 }
8498 else
8499 {
8500 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
8501 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
8502 unsignedp, mode, NULL_RTX, NULL_RTX,
8503 op0);
8504 }
8505 emit_move_insn (target, op1);
8506 emit_label (op0);
8507 return target;
8509 case BIT_NOT_EXPR:
8510 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8511 if (modifier == EXPAND_STACK_PARM)
8512 target = 0;
8513 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8514 if (temp == 0)
8515 abort ();
8516 return temp;
8518 case FFS_EXPR:
8519 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8520 if (modifier == EXPAND_STACK_PARM)
8521 target = 0;
8522 temp = expand_unop (mode, ffs_optab, op0, target, 1);
8523 if (temp == 0)
8524 abort ();
8525 return temp;
8527 case CLZ_EXPR:
8528 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8529 temp = expand_unop (mode, clz_optab, op0, target, 1);
8530 if (temp == 0)
8531 abort ();
8532 return temp;
8534 case CTZ_EXPR:
8535 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8536 temp = expand_unop (mode, ctz_optab, op0, target, 1);
8537 if (temp == 0)
8538 abort ();
8539 return temp;
8541 case POPCOUNT_EXPR:
8542 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8543 temp = expand_unop (mode, popcount_optab, op0, target, 1);
8544 if (temp == 0)
8545 abort ();
8546 return temp;
8548 case PARITY_EXPR:
8549 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8550 temp = expand_unop (mode, parity_optab, op0, target, 1);
8551 if (temp == 0)
8552 abort ();
8553 return temp;
8555 /* ??? Can optimize bitwise operations with one arg constant.
8556 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8557 and (a bitwise1 b) bitwise2 b (etc)
8558 but that is probably not worth while. */
8560 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8561 boolean values when we want in all cases to compute both of them. In
8562 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8563 as actual zero-or-1 values and then bitwise anding. In cases where
8564 there cannot be any side effects, better code would be made by
8565 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8566 how to recognize those cases. */
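/* For example, `a && b' as TRUTH_ANDIF_EXPR branches and may skip
   evaluating B entirely, while as TRUTH_AND_EXPR both A and B are
   reduced to 0 or 1 and combined with a single AND instruction.  */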
8568 case TRUTH_AND_EXPR:
8569 case BIT_AND_EXPR:
8570 this_optab = and_optab;
8571 goto binop;
8573 case TRUTH_OR_EXPR:
8574 case BIT_IOR_EXPR:
8575 this_optab = ior_optab;
8576 goto binop;
8578 case TRUTH_XOR_EXPR:
8579 case BIT_XOR_EXPR:
8580 this_optab = xor_optab;
8581 goto binop;
8583 case LSHIFT_EXPR:
8584 case RSHIFT_EXPR:
8585 case LROTATE_EXPR:
8586 case RROTATE_EXPR:
8587 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8588 subtarget = 0;
8589 if (modifier == EXPAND_STACK_PARM)
8590 target = 0;
8591 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8592 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8593 unsignedp);
8595 /* Could determine the answer when only additive constants differ. Also,
8596 the addition of one can be handled by changing the condition. */
8597 case LT_EXPR:
8598 case LE_EXPR:
8599 case GT_EXPR:
8600 case GE_EXPR:
8601 case EQ_EXPR:
8602 case NE_EXPR:
8603 case UNORDERED_EXPR:
8604 case ORDERED_EXPR:
8605 case UNLT_EXPR:
8606 case UNLE_EXPR:
8607 case UNGT_EXPR:
8608 case UNGE_EXPR:
8609 case UNEQ_EXPR:
8610 temp = do_store_flag (exp,
8611 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8612 tmode != VOIDmode ? tmode : mode, 0);
8613 if (temp != 0)
8614 return temp;
8616 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8617 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8618 && original_target
8619 && GET_CODE (original_target) == REG
8620 && (GET_MODE (original_target)
8621 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8623 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8624 VOIDmode, 0);
8626 /* If temp is constant, we can just compute the result. */
8627 if (GET_CODE (temp) == CONST_INT)
8628 {
8629 if (INTVAL (temp) != 0)
8630 emit_move_insn (target, const1_rtx);
8631 else
8632 emit_move_insn (target, const0_rtx);
8634 return target;
8635 }
8637 if (temp != original_target)
8638 {
8639 enum machine_mode mode1 = GET_MODE (temp);
8640 if (mode1 == VOIDmode)
8641 mode1 = tmode != VOIDmode ? tmode : mode;
8643 temp = copy_to_mode_reg (mode1, temp);
8644 }
8646 op1 = gen_label_rtx ();
8647 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8648 GET_MODE (temp), unsignedp, op1);
8649 emit_move_insn (temp, const1_rtx);
8650 emit_label (op1);
8651 return temp;
8652 }
8654 /* If no set-flag instruction, must generate a conditional
8655 store into a temporary variable. Drop through
8656 and handle this like && and ||. */
8658 case TRUTH_ANDIF_EXPR:
8659 case TRUTH_ORIF_EXPR:
8660 if (! ignore
8661 && (target == 0
8662 || modifier == EXPAND_STACK_PARM
8663 || ! safe_from_p (target, exp, 1)
8664 /* Make sure we don't have a hard reg (such as function's return
8665 value) live across basic blocks, if not optimizing. */
8666 || (!optimize && GET_CODE (target) == REG
8667 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8668 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8670 if (target)
8671 emit_clr_insn (target);
8673 op1 = gen_label_rtx ();
8674 jumpifnot (exp, op1);
8676 if (target)
8677 emit_0_to_1_insn (target);
8678 emit_label (op1);
8680 return ignore ? const0_rtx : target;
8682 case TRUTH_NOT_EXPR:
8683 if (modifier == EXPAND_STACK_PARM)
8684 target = 0;
8685 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8686 /* The parser is careful to generate TRUTH_NOT_EXPR
8687 only with operands that are always zero or one. */
8688 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8689 target, 1, OPTAB_LIB_WIDEN);
8690 if (temp == 0)
8691 abort ();
8692 return temp;
8694 case COMPOUND_EXPR:
8695 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8696 emit_queue ();
8697 return expand_expr (TREE_OPERAND (exp, 1),
8698 (ignore ? const0_rtx : target),
8699 VOIDmode, modifier);
8701 case COND_EXPR:
8702 /* If we would have a "singleton" (see below) were it not for a
8703 conversion in each arm, bring that conversion back out. */
8704 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8705 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8706 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8707 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8708 {
8709 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8710 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8712 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8713 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8714 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8715 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8716 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8717 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8718 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8719 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8720 return expand_expr (build1 (NOP_EXPR, type,
8721 build (COND_EXPR, TREE_TYPE (iftrue),
8722 TREE_OPERAND (exp, 0),
8723 iftrue, iffalse)),
8724 target, tmode, modifier);
8725 }
8728 /* Note that COND_EXPRs whose type is a structure or union
8729 are required to be constructed to contain assignments of
8730 a temporary variable, so that we can evaluate them here
8731 for side effect only. If type is void, we must do likewise. */
8733 /* If an arm of the branch requires a cleanup,
8734 only that cleanup is performed. */
8735 {
8736 tree singleton = 0;
8737 tree binary_op = 0, unary_op = 0;
8739 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8740 convert it to our mode, if necessary. */
8741 if (integer_onep (TREE_OPERAND (exp, 1))
8742 && integer_zerop (TREE_OPERAND (exp, 2))
8743 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8744 {
8745 if (ignore)
8746 {
8747 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8748 modifier);
8749 return const0_rtx;
8750 }
8752 if (modifier == EXPAND_STACK_PARM)
8753 target = 0;
8754 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8755 if (GET_MODE (op0) == mode)
8756 return op0;
8758 if (target == 0)
8759 target = gen_reg_rtx (mode);
8760 convert_move (target, op0, unsignedp);
8761 return target;
8762 }
8764 /* Check for X ? A + B : A. If we have this, we can copy A to the
8765 output and conditionally add B. Similarly for unary operations.
8766 Don't do this if X has side-effects because those side effects
8767 might affect A or B and the "?" operation is a sequence point in
8768 ANSI. (operand_equal_p tests for side effects.) */
8770 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8771 && operand_equal_p (TREE_OPERAND (exp, 2),
8772 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8773 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8774 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8775 && operand_equal_p (TREE_OPERAND (exp, 1),
8776 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8777 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8778 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8779 && operand_equal_p (TREE_OPERAND (exp, 2),
8780 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8781 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8782 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8783 && operand_equal_p (TREE_OPERAND (exp, 1),
8784 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8785 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8787 /* If we are not to produce a result, we have no target. Otherwise,
8788 if a target was specified use it; it will not be used as an
8789 intermediate target unless it is safe. If no target, use a
8790 temporary and assign it to TEMP. */
8792 if (ignore)
8793 temp = 0;
8794 else if (modifier == EXPAND_STACK_PARM)
8795 temp = assign_temp (type, 0, 0, 1);
8796 else if (original_target
8797 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8798 || (singleton && GET_CODE (original_target) == REG
8799 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8800 && original_target == var_rtx (singleton)))
8801 && GET_MODE (original_target) == mode
8802 #ifdef HAVE_conditional_move
8803 && (! can_conditionally_move_p (mode)
8804 || GET_CODE (original_target) == REG
8805 || TREE_ADDRESSABLE (type))
8806 #endif
8807 && (GET_CODE (original_target) != MEM
8808 || TREE_ADDRESSABLE (type)))
8809 temp = original_target;
8810 else if (TREE_ADDRESSABLE (type))
8811 abort ();
8812 else
8813 temp = assign_temp (type, 0, 0, 1);
8815 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8816 do the test of X as a store-flag operation, do this as
8817 A + ((X != 0) << log C). Similarly for other simple binary
8818 operators. Only do for C == 1 if BRANCH_COST is low. */
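/* For example, `x ? a + 8 : a' becomes a + ((x != 0) << 3): one
   store-flag, one shift and one add, and no branch.  For `x ? a : a + 8'
   the condition is inverted first, giving a + ((x == 0) << 3).  */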
8819 if (temp && singleton && binary_op
8820 && (TREE_CODE (binary_op) == PLUS_EXPR
8821 || TREE_CODE (binary_op) == MINUS_EXPR
8822 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8823 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8824 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8825 : integer_onep (TREE_OPERAND (binary_op, 1)))
8826 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8827 {
8828 rtx result;
8829 tree cond;
8830 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8831 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8832 ? addv_optab : add_optab)
8833 : TREE_CODE (binary_op) == MINUS_EXPR
8834 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8835 ? subv_optab : sub_optab)
8836 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8837 : xor_optab);
8839 /* If we had X ? A : A + 1, do this as A + (X == 0). */
8840 if (singleton == TREE_OPERAND (exp, 1))
8841 cond = invert_truthvalue (TREE_OPERAND (exp, 0));
8842 else
8843 cond = TREE_OPERAND (exp, 0);
8845 result = do_store_flag (cond, (safe_from_p (temp, singleton, 1)
8846 ? temp : NULL_RTX),
8847 mode, BRANCH_COST <= 1);
8849 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8850 result = expand_shift (LSHIFT_EXPR, mode, result,
8851 build_int_2 (tree_log2
8852 (TREE_OPERAND
8853 (binary_op, 1)),
8854 0),
8855 (safe_from_p (temp, singleton, 1)
8856 ? temp : NULL_RTX), 0);
8858 if (result)
8859 return result;
8860 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8861 return expand_binop (mode, boptab, op1, result, temp,
8862 unsignedp, OPTAB_LIB_WIDEN);
8863 }
8866 do_pending_stack_adjust ();
8867 NO_DEFER_POP;
8868 op0 = gen_label_rtx ();
8870 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8871 {
8872 if (temp != 0)
8873 {
8874 /* If the target conflicts with the other operand of the
8875 binary op, we can't use it. Also, we can't use the target
8876 if it is a hard register, because evaluating the condition
8877 might clobber it. */
8878 if ((binary_op
8879 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8880 || (GET_CODE (temp) == REG
8881 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8882 temp = gen_reg_rtx (mode);
8883 store_expr (singleton, temp,
8884 modifier == EXPAND_STACK_PARM ? 2 : 0);
8885 }
8886 else
8887 expand_expr (singleton,
8888 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8889 if (singleton == TREE_OPERAND (exp, 1))
8890 jumpif (TREE_OPERAND (exp, 0), op0);
8891 else
8892 jumpifnot (TREE_OPERAND (exp, 0), op0);
8894 start_cleanup_deferral ();
8895 if (binary_op && temp == 0)
8896 /* Just touch the other operand. */
8897 expand_expr (TREE_OPERAND (binary_op, 1),
8898 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8899 else if (binary_op)
8900 store_expr (build (TREE_CODE (binary_op), type,
8901 make_tree (type, temp),
8902 TREE_OPERAND (binary_op, 1)),
8903 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8904 else
8905 store_expr (build1 (TREE_CODE (unary_op), type,
8906 make_tree (type, temp)),
8907 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8908 op1 = op0;
8909 }
8910 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8911 comparison operator. If we have one of these cases, set the
8912 output to A, branch on A (cse will merge these two references),
8913 then set the output to FOO. */
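/* For example, for `x > 0 ? x : y' we store X into the output, branch
   on `x > 0' (both reads of X feed the same comparison, which cse can
   share), and only on fall-through overwrite the output with Y.  */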
8914 else if (temp
8915 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8916 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8917 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8918 TREE_OPERAND (exp, 1), 0)
8919 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8920 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8921 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8922 {
8923 if (GET_CODE (temp) == REG
8924 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8925 temp = gen_reg_rtx (mode);
8926 store_expr (TREE_OPERAND (exp, 1), temp,
8927 modifier == EXPAND_STACK_PARM ? 2 : 0);
8928 jumpif (TREE_OPERAND (exp, 0), op0);
8930 start_cleanup_deferral ();
8931 store_expr (TREE_OPERAND (exp, 2), temp,
8932 modifier == EXPAND_STACK_PARM ? 2 : 0);
8933 op1 = op0;
8934 }
8935 else if (temp
8936 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8937 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8938 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8939 TREE_OPERAND (exp, 2), 0)
8940 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8941 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8942 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8943 {
8944 if (GET_CODE (temp) == REG
8945 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8946 temp = gen_reg_rtx (mode);
8947 store_expr (TREE_OPERAND (exp, 2), temp,
8948 modifier == EXPAND_STACK_PARM ? 2 : 0);
8949 jumpifnot (TREE_OPERAND (exp, 0), op0);
8951 start_cleanup_deferral ();
8952 store_expr (TREE_OPERAND (exp, 1), temp,
8953 modifier == EXPAND_STACK_PARM ? 2 : 0);
8954 op1 = op0;
8955 }
8956 else
8957 {
8958 op1 = gen_label_rtx ();
8959 jumpifnot (TREE_OPERAND (exp, 0), op0);
8961 start_cleanup_deferral ();
8963 /* One branch of the cond can be void, if it never returns. For
8964 example A ? throw : E.  */
8965 if (temp != 0
8966 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8967 store_expr (TREE_OPERAND (exp, 1), temp,
8968 modifier == EXPAND_STACK_PARM ? 2 : 0);
8969 else
8970 expand_expr (TREE_OPERAND (exp, 1),
8971 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8972 end_cleanup_deferral ();
8973 emit_queue ();
8974 emit_jump_insn (gen_jump (op1));
8975 emit_barrier ();
8976 emit_label (op0);
8977 start_cleanup_deferral ();
8978 if (temp != 0
8979 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8980 store_expr (TREE_OPERAND (exp, 2), temp,
8981 modifier == EXPAND_STACK_PARM ? 2 : 0);
8982 else
8983 expand_expr (TREE_OPERAND (exp, 2),
8984 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8985 }
8987 end_cleanup_deferral ();
8989 emit_queue ();
8990 emit_label (op1);
8991 OK_DEFER_POP;
8993 return temp;
8994 }
8996 case TARGET_EXPR:
8997 {
8998 /* Something needs to be initialized, but we didn't know
8999 where that thing was when building the tree. For example,
9000 it could be the return value of a function, or a parameter
9001 to a function which lays down in the stack, or a temporary
9002 variable which must be passed by reference.
9004 We guarantee that the expression will either be constructed
9005 or copied into our original target. */
9007 tree slot = TREE_OPERAND (exp, 0);
9008 tree cleanups = NULL_TREE;
9009 tree exp1;
9011 if (TREE_CODE (slot) != VAR_DECL)
9012 abort ();
9014 if (! ignore)
9015 target = original_target;
9017 /* Set this here so that if we get a target that refers to a
9018 register variable that's already been used, put_reg_into_stack
9019 knows that it should fix up those uses. */
9020 TREE_USED (slot) = 1;
9022 if (target == 0)
9023 {
9024 if (DECL_RTL_SET_P (slot))
9025 {
9026 target = DECL_RTL (slot);
9027 /* We have already expanded the slot, so don't do
9028 anything else now. */
9029 if (TREE_OPERAND (exp, 1) == NULL_TREE)
9030 return target;
9031 }
9032 else
9033 {
9034 target = assign_temp (type, 2, 0, 1);
9035 /* All temp slots at this level must not conflict. */
9036 preserve_temp_slots (target);
9037 SET_DECL_RTL (slot, target);
9038 if (TREE_ADDRESSABLE (slot))
9039 put_var_into_stack (slot, /*rescan=*/false);
9041 /* Since SLOT is not known to the called function
9042 to belong to its stack frame, we must build an explicit
9043 cleanup. This case occurs when we must build up a reference
9044 to pass the reference as an argument. In this case,
9045 it is very likely that such a reference need not be
9046 built here. */
9048 if (TREE_OPERAND (exp, 2) == 0)
9049 TREE_OPERAND (exp, 2)
9050 = (*lang_hooks.maybe_build_cleanup) (slot);
9051 cleanups = TREE_OPERAND (exp, 2);
9052 }
9053 }
9054 else
9055 {
9056 /* This case does occur, when expanding a parameter which
9057 needs to be constructed on the stack. The target
9058 is the actual stack address that we want to initialize.
9059 The function we call will perform the cleanup in this case. */
9061 /* If we have already assigned it space, use that space,
9062 not target that we were passed in, as our target
9063 parameter is only a hint. */
9064 if (DECL_RTL_SET_P (slot))
9065 {
9066 target = DECL_RTL (slot);
9067 /* We have already expanded the slot, so don't do
9068 anything else now. */
9069 if (TREE_OPERAND (exp, 1) == NULL_TREE)
9070 return target;
9071 }
9072 else
9073 {
9074 SET_DECL_RTL (slot, target);
9075 /* If we must have an addressable slot, then make sure that
9076 the RTL that we just stored in slot is OK. */
9077 if (TREE_ADDRESSABLE (slot))
9078 put_var_into_stack (slot, /*rescan=*/true);
9079 }
9080 }
9082 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
9083 /* Mark it as expanded. */
9084 TREE_OPERAND (exp, 1) = NULL_TREE;
9086 store_expr (exp1, target, modifier == EXPAND_STACK_PARM ? 2 : 0);
9088 expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));
9090 return target;
9091 }
9093 case INIT_EXPR:
9094 {
9095 tree lhs = TREE_OPERAND (exp, 0);
9096 tree rhs = TREE_OPERAND (exp, 1);
9098 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
9099 return temp;
9100 }
9102 case MODIFY_EXPR:
9103 {
9104 /* If lhs is complex, expand calls in rhs before computing it.
9105 That's so we don't compute a pointer and save it over a
9106 call. If lhs is simple, compute it first so we can give it
9107 as a target if the rhs is just a call. This avoids an
9108 extra temp and copy and that prevents a partial-subsumption
9109 which makes bad code. Actually we could treat
9110 component_ref's of vars like vars. */
9112 tree lhs = TREE_OPERAND (exp, 0);
9113 tree rhs = TREE_OPERAND (exp, 1);
9117 /* Check for |= or &= of a bitfield of size one into another bitfield
9118 of size 1. In this case, (unless we need the result of the
9119 assignment) we can do this more efficiently with a
9120 test followed by an assignment, if necessary.
9122 ??? At this point, we can't get a BIT_FIELD_REF here. But if
9123 things change so we do, this code should be enhanced to
9124 support it. */
9125 if (ignore
9126 && TREE_CODE (lhs) == COMPONENT_REF
9127 && (TREE_CODE (rhs) == BIT_IOR_EXPR
9128 || TREE_CODE (rhs) == BIT_AND_EXPR)
9129 && TREE_OPERAND (rhs, 0) == lhs
9130 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
9131 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
9132 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
9133 {
9134 rtx label = gen_label_rtx ();
9136 do_jump (TREE_OPERAND (rhs, 1),
9137 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
9138 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
9139 expand_assignment (lhs, convert (TREE_TYPE (rhs),
9140 (TREE_CODE (rhs) == BIT_IOR_EXPR
9141 ? integer_one_node
9142 : integer_zero_node)),
9143 0, 0);
9144 do_pending_stack_adjust ();
9145 emit_label (label);
9146 return const0_rtx;
9147 }
9149 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
9150 return temp;
9151 }
9153 case RETURN_EXPR:
9155 if (!TREE_OPERAND (exp, 0))
9156 expand_null_return ();
9157 else
9158 expand_return (TREE_OPERAND (exp, 0));
9159 return const0_rtx;
9161 case PREINCREMENT_EXPR:
9162 case PREDECREMENT_EXPR:
9163 return expand_increment (exp, 0, ignore);
9165 case POSTINCREMENT_EXPR:
9166 case POSTDECREMENT_EXPR:
9167 /* Faster to treat as pre-increment if result is not used. */
9168 return expand_increment (exp, ! ignore, ignore);
9170 case ADDR_EXPR:
9171 if (modifier == EXPAND_STACK_PARM)
9172 target = 0;
9173 /* Are we taking the address of a nested function? */
9174 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
9175 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
9176 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
9177 && ! TREE_STATIC (exp))
9178 {
9179 op0 = trampoline_address (TREE_OPERAND (exp, 0));
9180 op0 = force_operand (op0, target);
9181 }
9182 /* If we are taking the address of something erroneous, just
9183 return a zero. */
9184 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
9185 return const0_rtx;
9186 /* If we are taking the address of a constant and are at the
9187 top level, we have to use output_constant_def since we can't
9188 call force_const_mem at top level. */
9189 else if (cfun == 0
9190 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
9191 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
9192 == 'c')))
9193 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
9194 else
9195 {
9196 /* We make sure to pass const0_rtx down if we came in with
9197 ignore set, to avoid doing the cleanups twice for something. */
9198 op0 = expand_expr (TREE_OPERAND (exp, 0),
9199 ignore ? const0_rtx : NULL_RTX, VOIDmode,
9200 (modifier == EXPAND_INITIALIZER
9201 ? modifier : EXPAND_CONST_ADDRESS));
9203 /* If we are going to ignore the result, OP0 will have been set
9204 to const0_rtx, so just return it. Don't get confused and
9205 think we are taking the address of the constant. */
9206 if (ignore)
9207 return op0;
9209 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
9210 clever and returns a REG when given a MEM. */
9211 op0 = protect_from_queue (op0, 1);
9213 /* We would like the object in memory. If it is a constant, we can
9214 have it be statically allocated into memory. For a non-constant,
9215 we need to allocate some memory and store the value into it. */
9217 if (CONSTANT_P (op0))
9218 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
9219 op0);
9220 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
9221 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
9222 || GET_CODE (op0) == PARALLEL || GET_CODE (op0) == LO_SUM)
9223 {
9224 /* If the operand is a SAVE_EXPR, we can deal with this by
9225 forcing the SAVE_EXPR into memory. */
9226 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
9227 {
9228 put_var_into_stack (TREE_OPERAND (exp, 0),
9229 /*rescan=*/true);
9230 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
9231 }
9232 else
9233 {
9234 /* If this object is in a register, it can't be BLKmode. */
9235 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9236 rtx memloc = assign_temp (inner_type, 1, 1, 1);
9238 if (GET_CODE (op0) == PARALLEL)
9239 /* Handle calls that pass values in multiple
9240 non-contiguous locations. The Irix 6 ABI has examples
9241 of this. */
9242 emit_group_store (memloc, op0,
9243 int_size_in_bytes (inner_type));
9244 else
9245 emit_move_insn (memloc, op0);
9246 op0 = memloc;
9247 }
9248 }
9251 if (GET_CODE (op0) != MEM)
9252 abort ();
9254 mark_temp_addr_taken (op0);
9255 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
9256 {
9257 op0 = XEXP (op0, 0);
9258 #ifdef POINTERS_EXTEND_UNSIGNED
9259 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
9260 && mode == ptr_mode)
9261 op0 = convert_memory_address (ptr_mode, op0);
9262 #endif
9263 return op0;
9264 }
9266 /* If OP0 is not aligned at least as much as the type requires, we
9267 need to make a temporary, copy OP0 to it, and take the address of
9268 the temporary. We want to use the alignment of the type, not of
9269 the operand. Note that this is incorrect for FUNCTION_TYPE, but
9270 the test for BLKmode means that can't happen. The test for
9271 BLKmode is because we never make mis-aligned MEMs with
9272 non-BLKmode mode.
9274 We don't need to do this at all if the machine doesn't have
9275 strict alignment. */
9276 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
9277 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
9278 > MEM_ALIGN (op0))
9279 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
9280 {
9281 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9282 rtx new;
9284 if (TYPE_ALIGN_OK (inner_type))
9285 abort ();
9287 if (TREE_ADDRESSABLE (inner_type))
9288 {
9289 /* We can't make a bitwise copy of this object, so fail. */
9290 error ("cannot take the address of an unaligned member");
9291 return const0_rtx;
9292 }
9294 new = assign_stack_temp_for_type
9295 (TYPE_MODE (inner_type),
9296 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
9297 : int_size_in_bytes (inner_type),
9298 1, build_qualified_type (inner_type,
9299 (TYPE_QUALS (inner_type)
9300 | TYPE_QUAL_CONST)));
9302 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
9303 (modifier == EXPAND_STACK_PARM
9304 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
9305 op0 = new;
9306 }
9309 op0 = force_operand (XEXP (op0, 0), target);
9312 if (flag_force_addr
9313 && GET_CODE (op0) != REG
9314 && modifier != EXPAND_CONST_ADDRESS
9315 && modifier != EXPAND_INITIALIZER
9316 && modifier != EXPAND_SUM)
9317 op0 = force_reg (Pmode, op0);
9319 if (GET_CODE (op0) == REG
9320 && ! REG_USERVAR_P (op0))
9321 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
9323 #ifdef POINTERS_EXTEND_UNSIGNED
9324 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
9325 && mode == ptr_mode)
9326 op0 = convert_memory_address (ptr_mode, op0);
9327 #endif
9329 return op0;
9331 case ENTRY_VALUE_EXPR:
9332 abort ();
9334 /* COMPLEX type for Extended Pascal & Fortran */
9335 case COMPLEX_EXPR:
9336 {
9337 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9338 rtx insns;
9340 /* Get the rtx code of the operands. */
9341 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9342 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
9344 if (! target)
9345 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
9347 start_sequence ();
9349 /* Move the real (op0) and imaginary (op1) parts to their location. */
9350 emit_move_insn (gen_realpart (mode, target), op0);
9351 emit_move_insn (gen_imagpart (mode, target), op1);
9353 insns = get_insns ();
9354 end_sequence ();
9356 /* Complex construction should appear as a single unit. */
9357 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
9358 each with a separate pseudo as destination.
9359 It's not correct for flow to treat them as a unit. */
9360 if (GET_CODE (target) != CONCAT)
9361 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
9362 else
9363 emit_insn (insns);
9365 return target;
9366 }
9368 case REALPART_EXPR:
9369 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9370 return gen_realpart (mode, op0);
9372 case IMAGPART_EXPR:
9373 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9374 return gen_imagpart (mode, op0);
9376 case CONJ_EXPR:
9377 {
9378 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9379 rtx imag_t;
9380 rtx insns;
9382 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9384 if (! target)
9385 target = gen_reg_rtx (mode);
9387 start_sequence ();
9389 /* Store the realpart and the negated imagpart to target. */
9390 emit_move_insn (gen_realpart (partmode, target),
9391 gen_realpart (partmode, op0));
9393 imag_t = gen_imagpart (partmode, target);
9394 temp = expand_unop (partmode,
9395 ! unsignedp && flag_trapv
9396 && (GET_MODE_CLASS(partmode) == MODE_INT)
9397 ? negv_optab : neg_optab,
9398 gen_imagpart (partmode, op0), imag_t, 0);
9399 if (temp != imag_t)
9400 emit_move_insn (imag_t, temp);
9402 insns = get_insns ();
9403 end_sequence ();
9405 /* Conjugate should appear as a single unit
9406 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
9407 each with a separate pseudo as destination.
9408 It's not correct for flow to treat them as a unit. */
9409 if (GET_CODE (target) != CONCAT)
9410 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
9411 else
9412 emit_insn (insns);
9414 return target;
9415 }
9417 case TRY_CATCH_EXPR:
9418 {
9419 tree handler = TREE_OPERAND (exp, 1);
9421 expand_eh_region_start ();
9423 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9425 expand_eh_region_end_cleanup (handler);
9427 return op0;
9428 }
9430 case TRY_FINALLY_EXPR:
9431 {
9432 tree try_block = TREE_OPERAND (exp, 0);
9433 tree finally_block = TREE_OPERAND (exp, 1);
9435 if (!optimize || unsafe_for_reeval (finally_block) > 1)
9436 {
9437 /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
9438 is not sufficient, so we cannot expand the block twice.
9439 So we play games with GOTO_SUBROUTINE_EXPR to let us
9440 expand the thing only once. */
9441 /* When not optimizing, we go ahead with this form since
9442 (1) user breakpoints operate more predictably without
9443 code duplication, and
9444 (2) we're not running any of the global optimizers
9445 that would explode in time/space with the highly
9446 connected CFG created by the indirect branching. */
9448 rtx finally_label = gen_label_rtx ();
9449 rtx done_label = gen_label_rtx ();
9450 rtx return_link = gen_reg_rtx (Pmode);
9451 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
9452 (tree) finally_label, (tree) return_link);
9453 TREE_SIDE_EFFECTS (cleanup) = 1;
9455 /* Start a new binding layer that will keep track of all cleanup
9456 actions to be performed. */
9457 expand_start_bindings (2);
9458 target_temp_slot_level = temp_slot_level;
9460 expand_decl_cleanup (NULL_TREE, cleanup);
9461 op0 = expand_expr (try_block, target, tmode, modifier);
9463 preserve_temp_slots (op0);
9464 expand_end_bindings (NULL_TREE, 0, 0);
9465 emit_jump (done_label);
9466 emit_label (finally_label);
9467 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
9468 emit_indirect_jump (return_link);
9469 emit_label (done_label);
9470 }
9471 else
9472 {
9473 expand_start_bindings (2);
9474 target_temp_slot_level = temp_slot_level;
9476 expand_decl_cleanup (NULL_TREE, finally_block);
9477 op0 = expand_expr (try_block, target, tmode, modifier);
9479 preserve_temp_slots (op0);
9480 expand_end_bindings (NULL_TREE, 0, 0);
9481 }
9483 return op0;
9484 }
9486 case GOTO_SUBROUTINE_EXPR:
9487 {
9488 rtx subr = (rtx) TREE_OPERAND (exp, 0);
9489 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
9490 rtx return_address = gen_label_rtx ();
9491 emit_move_insn (return_link,
9492 gen_rtx_LABEL_REF (Pmode, return_address));
9493 emit_jump (subr);
9494 emit_label (return_address);
9495 return const0_rtx;
9496 }
9498 case VA_ARG_EXPR:
9499 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
9501 case EXC_PTR_EXPR:
9502 return get_exception_pointer (cfun);
9504 case FDESC_EXPR:
9505 /* Function descriptors are not valid except for as
9506 initialization constants, and should not be expanded. */
9507 abort ();
9509 default:
9510 return (*lang_hooks.expand_expr) (exp, original_target, tmode, modifier);
9513 /* Here to do an ordinary binary operator, generating an instruction
9514 from the optab already placed in `this_optab'. */
9515 binop:
9516 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
9517 subtarget = 0;
9518 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
9519 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9520 binop2:
9521 if (modifier == EXPAND_STACK_PARM)
9522 target = 0;
9523 temp = expand_binop (mode, this_optab, op0, op1, target,
9524 unsignedp, OPTAB_LIB_WIDEN);
9525 if (temp == 0)
9526 abort ();
9527 return temp;
9528 }
9530 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9531 when applied to the address of EXP produces an address known to be
9532 aligned more than BIGGEST_ALIGNMENT. */
9534 static int
9535 is_aligning_offset (offset, exp)
9536 tree offset;
9537 tree exp;
9538 {
9539 /* Strip off any conversions and WITH_RECORD_EXPR nodes. */
9540 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9541 || TREE_CODE (offset) == NOP_EXPR
9542 || TREE_CODE (offset) == CONVERT_EXPR
9543 || TREE_CODE (offset) == WITH_RECORD_EXPR)
9544 offset = TREE_OPERAND (offset, 0);
9546 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9547 power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9548 if (TREE_CODE (offset) != BIT_AND_EXPR
9549 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9550 || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT) <= 0
9551 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9552 return 0;
9554 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9555 It must be NEGATE_EXPR. Then strip any more conversions. */
9556 offset = TREE_OPERAND (offset, 0);
9557 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9558 || TREE_CODE (offset) == NOP_EXPR
9559 || TREE_CODE (offset) == CONVERT_EXPR)
9560 offset = TREE_OPERAND (offset, 0);
9562 if (TREE_CODE (offset) != NEGATE_EXPR)
9563 return 0;
9565 offset = TREE_OPERAND (offset, 0);
9566 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9567 || TREE_CODE (offset) == NOP_EXPR
9568 || TREE_CODE (offset) == CONVERT_EXPR)
9569 offset = TREE_OPERAND (offset, 0);
9571 /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
9572 whose type is the same as EXP. */
9573 return (TREE_CODE (offset) == ADDR_EXPR
9574 && (TREE_OPERAND (offset, 0) == exp
9575 || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
9576 && (TREE_TYPE (TREE_OPERAND (offset, 0))
9577 == TREE_TYPE (exp)))));
9580 /* Return the tree node if an ARG corresponds to a string constant or zero
9581 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9582 in bytes within the string that ARG is accessing. The type of the
9583 offset will be `sizetype'. */
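/* For example, for `"hello" + 2', as produced for `&"hello"[2]', the
   STRING_CST for "hello" is returned and *PTR_OFFSET is set to 2.  */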
9585 tree
9586 string_constant (arg, ptr_offset)
9587 tree arg;
9588 tree *ptr_offset;
9589 {
9590 STRIP_NOPS (arg);
9592 if (TREE_CODE (arg) == ADDR_EXPR
9593 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9594 {
9595 *ptr_offset = size_zero_node;
9596 return TREE_OPERAND (arg, 0);
9597 }
9598 else if (TREE_CODE (arg) == PLUS_EXPR)
9599 {
9600 tree arg0 = TREE_OPERAND (arg, 0);
9601 tree arg1 = TREE_OPERAND (arg, 1);
9603 STRIP_NOPS (arg0);
9604 STRIP_NOPS (arg1);
9606 if (TREE_CODE (arg0) == ADDR_EXPR
9607 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9608 {
9609 *ptr_offset = convert (sizetype, arg1);
9610 return TREE_OPERAND (arg0, 0);
9611 }
9612 else if (TREE_CODE (arg1) == ADDR_EXPR
9613 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9614 {
9615 *ptr_offset = convert (sizetype, arg0);
9616 return TREE_OPERAND (arg1, 0);
9617 }
9618 }
9620 return 0;
9621 }
9623 /* Expand code for a post- or pre- increment or decrement
9624 and return the RTX for the result.
9625 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9627 static rtx
9628 expand_increment (exp, post, ignore)
9629 tree exp;
9630 int post, ignore;
9631 {
9632 rtx op0, op1;
9633 rtx temp, value;
9634 tree incremented = TREE_OPERAND (exp, 0);
9635 optab this_optab = add_optab;
9636 int icode;
9637 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9638 int op0_is_copy = 0;
9639 int single_insn = 0;
9640 /* 1 means we can't store into OP0 directly,
9641 because it is a subreg narrower than a word,
9642 and we don't dare clobber the rest of the word. */
9643 int bad_subreg = 0;
9645 /* Stabilize any component ref that might need to be
9646 evaluated more than once below. */
9647 if (!post
9648 || TREE_CODE (incremented) == BIT_FIELD_REF
9649 || (TREE_CODE (incremented) == COMPONENT_REF
9650 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9651 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9652 incremented = stabilize_reference (incremented);
9653 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9654 ones into save exprs so that they don't accidentally get evaluated
9655 more than once by the code below. */
9656 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9657 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9658 incremented = save_expr (incremented);
9660 /* Compute the operands as RTX.
9661 Note whether OP0 is the actual lvalue or a copy of it:
9662 I believe it is a copy iff it is a register or subreg
9663 and insns were generated in computing it. */
9665 temp = get_last_insn ();
9666 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9668 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9669 in place but instead must do sign- or zero-extension during assignment,
9670 so we copy it into a new register and let the code below use it as
9671 a copy.
9673 Note that we can safely modify this SUBREG since it is known not to be
9674 shared (it was made by the expand_expr call above). */
9676 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9677 {
9679 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9680 op0_is_copy = 1;
9681 }
9683 else if (GET_CODE (op0) == SUBREG
9684 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9685 {
9686 /* We cannot increment this SUBREG in place. If we are
9687 post-incrementing, get a copy of the old value. Otherwise,
9688 just mark that we cannot increment in place. */
9689 if (post)
9690 op0 = copy_to_reg (op0);
9691 else
9692 bad_subreg = 1;
9693 }
9695 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9696 && temp != get_last_insn ());
9697 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9699 /* Decide whether incrementing or decrementing. */
9700 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9701 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9702 this_optab = sub_optab;
9704 /* Convert decrement by a constant into a negative increment. */
9705 if (this_optab == sub_optab
9706 && GET_CODE (op1) == CONST_INT)
9708 op1 = GEN_INT (-INTVAL (op1));
9709 this_optab = add_optab;
9712 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9713 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9715 /* For a preincrement, see if we can do this with a single instruction. */
9716 if (!post)
9717 {
9718 icode = (int) this_optab->handlers[(int) mode].insn_code;
9719 if (icode != (int) CODE_FOR_nothing
9720 /* Make sure that OP0 is valid for operands 0 and 1
9721 of the insn we want to queue. */
9722 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9723 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9724 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9725 single_insn = 1;
9726 }
9728 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9729 then we cannot just increment OP0. We must therefore contrive to
9730 increment the original value. Then, for postincrement, we can return
9731 OP0 since it is a copy of the old value. For preincrement, expand here
9732 unless we can do it with a single insn.
9734 Likewise if storing directly into OP0 would clobber high bits
9735 we need to preserve (bad_subreg). */
9736 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9737 {
9738 /* This is the easiest way to increment the value wherever it is.
9739 Problems with multiple evaluation of INCREMENTED are prevented
9740 because either (1) it is a component_ref or preincrement,
9741 in which case it was stabilized above, or (2) it is an array_ref
9742 with constant index in an array in a register, which is
9743 safe to reevaluate. */
9744 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9745 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9746 ? MINUS_EXPR : PLUS_EXPR),
9747 TREE_TYPE (exp),
9748 incremented,
9749 TREE_OPERAND (exp, 1));
9751 while (TREE_CODE (incremented) == NOP_EXPR
9752 || TREE_CODE (incremented) == CONVERT_EXPR)
9754 newexp = convert (TREE_TYPE (incremented), newexp);
9755 incremented = TREE_OPERAND (incremented, 0);
9758 temp = expand_assignment (incremented, newexp, ! post && ! ignore , 0);
9759 return post ? op0 : temp;
9760 }
9762 if (post)
9763 {
9764 /* We have a true reference to the value in OP0.
9765 If there is an insn to add or subtract in this mode, queue it.
9766 Queueing the increment insn avoids the register shuffling
9767 that often results if we must increment now and first save
9768 the old value for subsequent use. */
9770 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9771 op0 = stabilize (op0);
9772 #endif
9774 icode = (int) this_optab->handlers[(int) mode].insn_code;
9775 if (icode != (int) CODE_FOR_nothing
9776 /* Make sure that OP0 is valid for operands 0 and 1
9777 of the insn we want to queue. */
9778 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9779 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9781 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9782 op1 = force_reg (mode, op1);
9784 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9786 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9787 {
9788 rtx addr = (general_operand (XEXP (op0, 0), mode)
9789 ? force_reg (Pmode, XEXP (op0, 0))
9790 : copy_to_reg (XEXP (op0, 0)));
9791 rtx temp, result;
9793 op0 = replace_equiv_address (op0, addr);
9794 temp = force_reg (GET_MODE (op0), op0);
9795 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9796 op1 = force_reg (mode, op1);
9798 /* The increment queue is LIFO, thus we have to `queue'
9799 the instructions in reverse order. */
9800 enqueue_insn (op0, gen_move_insn (op0, temp));
9801 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9802 return result;
9803 }
9804 }
9806 /* Preincrement, or we can't increment with one simple insn. */
9807 if (post)
9808 /* Save a copy of the value before inc or dec, to return it later. */
9809 temp = value = copy_to_reg (op0);
9810 else
9811 /* Arrange to return the incremented value. */
9812 /* Copy the rtx because expand_binop will protect from the queue,
9813 and the results of that would be invalid for us to return
9814 if our caller does emit_queue before using our result. */
9815 temp = copy_rtx (value = op0);
9817 /* Increment however we can. */
9818 op1 = expand_binop (mode, this_optab, value, op1, op0,
9819 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9821 /* Make sure the value is stored into OP0. */
9822 if (op1 != op0)
9823 emit_move_insn (op0, op1);
9825 return temp;
9826 }
9828 /* Generate code to calculate EXP using a store-flag instruction
9829 and return an rtx for the result. EXP is either a comparison
9830 or a TRUTH_NOT_EXPR whose operand is a comparison.
9832 If TARGET is nonzero, store the result there if convenient.
9834 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
9835 cheap.
9837 Return zero if there is no suitable set-flag instruction
9838 available on this machine.
9840 Once expand_expr has been called on the arguments of the comparison,
9841 we are committed to doing the store flag, since it is not safe to
9842 re-evaluate the expression. We emit the store-flag insn by calling
9843 emit_store_flag, but only expand the arguments if we have a reason
9844 to believe that emit_store_flag will be successful. If we think that
9845 it will, but it isn't, we have to simulate the store-flag with a
9846 set/jump/set sequence. */
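/* For example, `x > y' on a machine with a store-flag (scc) pattern for
   GT is a single setcc insn; the fallback sequence below instead loads
   1 into the target, conditionally jumps over a store of 0, and lets
   the jump optimizer clean up.  */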
9848 static rtx
9849 do_store_flag (exp, target, mode, only_cheap)
9850 tree exp;
9851 rtx target;
9852 enum machine_mode mode;
9853 int only_cheap;
9854 {
9855 enum rtx_code code;
9856 tree arg0, arg1, type;
9857 tree tem;
9858 enum machine_mode operand_mode;
9859 int invert = 0;
9860 int unsignedp;
9861 rtx op0, op1;
9862 enum insn_code icode;
9863 rtx subtarget = target;
9864 rtx result, label;
9866 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9867 result at the end. We can't simply invert the test since it would
9868 have already been inverted if it were valid. This case occurs for
9869 some floating-point comparisons. */
9871 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9872 invert = 1, exp = TREE_OPERAND (exp, 0);
9874 arg0 = TREE_OPERAND (exp, 0);
9875 arg1 = TREE_OPERAND (exp, 1);
9877 /* Don't crash if the comparison was erroneous. */
9878 if (arg0 == error_mark_node || arg1 == error_mark_node)
9879 return const0_rtx;
9881 type = TREE_TYPE (arg0);
9882 operand_mode = TYPE_MODE (type);
9883 unsignedp = TREE_UNSIGNED (type);
9885 /* We won't bother with BLKmode store-flag operations because it would mean
9886 passing a lot of information to emit_store_flag. */
9887 if (operand_mode == BLKmode)
9888 return 0;
9890 /* We won't bother with store-flag operations involving function pointers
9891 when function pointers must be canonicalized before comparisons. */
9892 #ifdef HAVE_canonicalize_funcptr_for_compare
9893 if (HAVE_canonicalize_funcptr_for_compare
9894 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9895 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9896 == FUNCTION_TYPE))
9897 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9898 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9899 == FUNCTION_TYPE))))
9900 return 0;
9901 #endif
9906 /* Get the rtx comparison code to use. We know that EXP is a comparison
9907 operation of some type. Some comparisons against 1 and -1 can be
9908 converted to comparisons with zero. Do so here so that the tests
9909 below will be aware that we have a comparison with zero. These
9910 tests will not catch constants in the first operand, but constants
9911 are rarely passed as the first operand. */
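/* For example, `x < 1' becomes `x <= 0', `x >= 1' becomes `x > 0', and
   a signed `x <= -1' becomes `x < 0', so the zero-comparison special
   cases below can see them.  */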
9913 switch (TREE_CODE (exp))
9914 {
9915 case EQ_EXPR:
9916 code = EQ;
9917 break;
9918 case NE_EXPR:
9919 code = NE;
9920 break;
9921 case LT_EXPR:
9922 if (integer_onep (arg1))
9923 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9924 else
9925 code = unsignedp ? LTU : LT;
9926 break;
9927 case LE_EXPR:
9928 if (! unsignedp && integer_all_onesp (arg1))
9929 arg1 = integer_zero_node, code = LT;
9930 else
9931 code = unsignedp ? LEU : LE;
9932 break;
9933 case GT_EXPR:
9934 if (! unsignedp && integer_all_onesp (arg1))
9935 arg1 = integer_zero_node, code = GE;
9936 else
9937 code = unsignedp ? GTU : GT;
9938 break;
9939 case GE_EXPR:
9940 if (integer_onep (arg1))
9941 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9942 else
9943 code = unsignedp ? GEU : GE;
9944 break;
9946 case UNORDERED_EXPR:
9947 code = UNORDERED;
9948 break;
9949 case ORDERED_EXPR:
9950 code = ORDERED;
9951 break;
9952 case UNLT_EXPR:
9953 code = UNLT;
9954 break;
9955 case UNLE_EXPR:
9956 code = UNLE;
9957 break;
9958 case UNGT_EXPR:
9959 code = UNGT;
9960 break;
9961 case UNGE_EXPR:
9962 code = UNGE;
9963 break;
9964 case UNEQ_EXPR:
9965 code = UNEQ;
9966 break;
9967 default:
9968 abort ();
9969 }
9972 /* Put a constant second. */
9973 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9974 {
9975 tem = arg0; arg0 = arg1; arg1 = tem;
9976 code = swap_condition (code);
9977 }
9979 /* If this is an equality or inequality test of a single bit, we can
9980 do this by shifting the bit being tested to the low-order bit and
9981 masking the result with the constant 1. If the condition was EQ,
9982 we xor it with 1. This does not require an scc insn and is faster
9983 than an scc insn even if we have it. */
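/* For example, `(x & 8) != 0' becomes `(x >> 3) & 1', and
   `(x & 8) == 0' becomes `((x >> 3) & 1) ^ 1'.  */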
9985 if ((code == NE || code == EQ)
9986 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9987 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9988 {
9989 tree inner = TREE_OPERAND (arg0, 0);
9990 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
9991 int ops_unsignedp;
9993 /* If INNER is a right shift of a constant and it plus BITNUM does
9994 not overflow, adjust BITNUM and INNER. */
9996 if (TREE_CODE (inner) == RSHIFT_EXPR
9997 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
9998 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
9999 && bitnum < TYPE_PRECISION (type)
10000 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10001 bitnum - TYPE_PRECISION (type)))
10003 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10004 inner = TREE_OPERAND (inner, 0);
10007 /* If we are going to be able to omit the AND below, we must do our
10008 operations as unsigned. If we must use the AND, we have a choice.
10009 Normally unsigned is faster, but for some machines signed is. */
10010 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10011 #ifdef LOAD_EXTEND_OP
10012 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10013 #else
10014 : 1
10015 #endif
10016 );
10018 if (! get_subtarget (subtarget)
10019 || GET_MODE (subtarget) != operand_mode
10020 || ! safe_from_p (subtarget, inner, 1))
10021 subtarget = 0;
10023 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10025 if (bitnum != 0)
10026 op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
10027 size_int (bitnum), subtarget, ops_unsignedp);
10029 if (GET_MODE (op0) != mode)
10030 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10032 if ((code == EQ && ! invert) || (code == NE && invert))
10033 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10034 ops_unsignedp, OPTAB_LIB_WIDEN);
10036 /* Put the AND last so it can combine with more things. */
10037 if (bitnum != TYPE_PRECISION (type) - 1)
10038 op0 = expand_and (mode, op0, const1_rtx, subtarget);
10040 return op0;
10041 }
10043 /* Now see if we are likely to be able to do this. Return if not. */
10044 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10045 return 0;
10047 icode = setcc_gen_code[(int) code];
10048 if (icode == CODE_FOR_nothing
10049 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10050 {
10051 /* We can only do this if it is one of the special cases that
10052 can be handled without an scc insn. */
10053 if ((code == LT && integer_zerop (arg1))
10054 || (! only_cheap && code == GE && integer_zerop (arg1)))
10055 ;
10056 else if (BRANCH_COST >= 0
10057 && ! only_cheap && (code == NE || code == EQ)
10058 && TREE_CODE (type) != REAL_TYPE
10059 && ((abs_optab->handlers[(int) operand_mode].insn_code
10060 != CODE_FOR_nothing)
10061 || (ffs_optab->handlers[(int) operand_mode].insn_code
10062 != CODE_FOR_nothing)))
10063 ;
10064 else
10065 return 0;
10066 }
10068 if (! get_subtarget (target)
10069 || GET_MODE (subtarget) != operand_mode
10070 || ! safe_from_p (subtarget, arg1, 1))
10071 subtarget = 0;
10073 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10074 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10076 if (target == 0)
10077 target = gen_reg_rtx (mode);
10079 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10080 because, if the emit_store_flag does anything it will succeed and
10081 OP0 and OP1 will not be used subsequently. */
10083 result = emit_store_flag (target, code,
10084 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10085 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10086 operand_mode, unsignedp, 1);
10088 if (result)
10089 {
10090 if (invert)
10091 result = expand_binop (mode, xor_optab, result, const1_rtx,
10092 result, 0, OPTAB_LIB_WIDEN);
10093 return result;
10094 }
10096 /* If this failed, we have to do this with set/compare/jump/set code. */
10097 if (GET_CODE (target) != REG
10098 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10099 target = gen_reg_rtx (GET_MODE (target));
10101 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10102 result = compare_from_rtx (op0, op1, code, unsignedp,
10103 operand_mode, NULL_RTX);
10104 if (GET_CODE (result) == CONST_INT)
10105 return (((result == const0_rtx && ! invert)
10106 || (result != const0_rtx && invert))
10107 ? const0_rtx : const1_rtx);
10109 /* The code of RESULT may not match CODE if compare_from_rtx
10110 decided to swap its operands and reverse the original code.
10112 We know that compare_from_rtx returns either a CONST_INT or
10113 a new comparison code, so it is safe to just extract the
10114 code from RESULT. */
10115 code = GET_CODE (result);
10117 label = gen_label_rtx ();
10118 if (bcc_gen_fctn[(int) code] == 0)
10119 abort ();
10121 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10122 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10123 emit_label (label);
10125 return target;
10126 }
10129 /* Stubs in case we haven't got a casesi insn. */
10130 #ifndef HAVE_casesi
10131 # define HAVE_casesi 0
10132 # define gen_casesi(a, b, c, d, e) (0)
10133 # define CODE_FOR_casesi CODE_FOR_nothing
10134 #endif
/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */
#ifndef CASE_VALUES_THRESHOLD
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#endif /* CASE_VALUES_THRESHOLD */

unsigned int
case_values_threshold ()
{
  return CASE_VALUES_THRESHOLD;
}
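
/* So, by default, a switch statement needs at least four case values
   (five, without a casesi pattern) before a dispatch table is
   considered preferable to a chain of compare-and-branch insns.  */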
/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */

int
try_casesi (index_type, index_expr, minval, range,
	    table_label, default_label)
     tree index_type, index_expr, minval, range;
     rtx table_label ATTRIBUTE_UNUSED;
     rtx default_label;
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;
  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build (MINUS_EXPR, index_type,
			  index_expr, minval);
      minval = integer_zero_node;
      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
			       omode, 1, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
	{
	  index_expr = convert ((*lang_hooks.types.type_for_size)
				(index_bits, 0), index_expr);
	  index_type = TREE_TYPE (index_expr);
	}

      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
    }
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();
  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);

  op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
		       op1, TREE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
		       op2, TREE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
			      table_label, default_label));
  return 1;
}
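
/* For instance, for `switch (i)' with case values 10..14 we are
   called with minval == 10 and range == 4; the casesi pattern itself
   is expected to compare i - 10 against 4 (unsigned) and branch to
   default_label when the index is out of bounds, so no separate
   range check is emitted here.  */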
/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif
/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

static void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  rtx temp, vector;

  if (INTVAL (range) > cfun->max_jumptable_ents)
    cfun->max_jumptable_ents = INTVAL (range);
  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */
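  /* Concretely: for a table covering original case values 3..7, RANGE
     is 4 and INDEX has already had 3 subtracted.  In a 32-bit mode an
     original value of 2 wraps to INDEX == 0xffffffff and an original
     value of 9 yields 6; both compare GTU against 4 and branch to
     DEFAULT_LABEL, while 3..7 map to 0..4 and fall through to the
     dispatch.  */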
  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
			   default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);
  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif
  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
			gen_rtx_MULT (Pmode, index,
				      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
			gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  MEM_NOTRAP_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
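
/* The dispatch address formed above is simply

	table_label + index * GET_MODE_SIZE (CASE_VECTOR_MODE)

   so with, say, 4-byte vector entries an index of 2 loads the third
   slot of the table; the tablejump insn then branches through the
   loaded entry (a target's pattern adds it back to the table address
   when the vector holds PC-relative offsets).  */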
int
try_tablejump (index_type, index_expr, minval, range,
	       table_label, default_label)
     tree index_type, index_expr, minval, range;
     rtx table_label, default_label;
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold (build (MINUS_EXPR, index_type,
			    convert (index_type, index_expr),
			    convert (index_type, minval)));
  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
		convert_modes (TYPE_MODE (index_type),
			       TYPE_MODE (TREE_TYPE (range)),
			       expand_expr (range, NULL_RTX,
					    VOIDmode, 0),
			       TREE_UNSIGNED (TREE_TYPE (range))),
		table_label, default_label);
  return 1;
}
/* Nonzero if the mode is a valid vector mode for this architecture.
   This returns nonzero even if there is no hardware support for the
   vector mode, but we can emulate with narrower modes.  */

int
vector_mode_valid_p (mode)
     enum machine_mode mode;
{
  enum mode_class class = GET_MODE_CLASS (mode);
  enum machine_mode innermode;

  /* Doh!  What's going on?  */
  if (class != MODE_VECTOR_INT
      && class != MODE_VECTOR_FLOAT)
    return 0;

  /* Hardware support.  Woo hoo!  */
  if (VECTOR_MODE_SUPPORTED_P (mode))
    return 1;

  innermode = GET_MODE_INNER (mode);

  /* We should probably return 1 if requesting V4DI and we have no DI,
     but we have V2DI, but this is probably very unlikely.  */

  /* If we have support for the inner mode, we can safely emulate it.
     We may not have V2DI, but we can emulate with a pair of DIs.  */
  return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
}
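
/* For example, on a target with no vector unit but a working DImode
   move pattern, V2DI is still reported valid here: the middle end
   can fall back to moving the two DImode halves separately.  */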
/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */

static rtx
const_vector_from_tree (exp)
     tree exp;
{
  rtvec v;
  int units, i;
  tree link, elt;
  enum machine_mode inner, mode;

  mode = TYPE_MODE (TREE_TYPE (exp));

  if (is_zeros_p (exp))
    return CONST0_RTX (mode);

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  v = rtvec_alloc (units);

  link = TREE_VECTOR_CST_ELTS (exp);
  for (i = 0; link; link = TREE_CHAIN (link), ++i)
    {
      elt = TREE_VALUE (link);

      if (TREE_CODE (elt) == REAL_CST)
	RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
							 inner);
      else
	RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
					       TREE_INT_CST_HIGH (elt),
					       inner);
    }

  return gen_rtx_raw_CONST_VECTOR (mode, v);
}
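
/* E.g. a V4SImode VECTOR_CST holding {1, 2, 3, 4} becomes

	(const_vector:V4SI [(const_int 1) (const_int 2)
			    (const_int 3) (const_int 4)])

   while an all-zero constant is returned early as CONST0_RTX.  */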
#include "gt-expr.h"