/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "typeclass.h"
#include "langhooks.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif
/* Convert defined/undefined to boolean.  */
#ifdef TARGET_MEM_FUNCTIONS
#undef TARGET_MEM_FUNCTIONS
#define TARGET_MEM_FUNCTIONS 1
#else
#define TARGET_MEM_FUNCTIONS 0
#endif
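
/* Reducing TARGET_MEM_FUNCTIONS to a plain 0/1 value lets the code
   below choose between the memcpy/memset and bcopy/bzero families with
   an ordinary run-time test, e.g.

     if (TARGET_MEM_FUNCTIONS)
       size_mode = TYPE_MODE (sizetype);
     else
       size_mode = TYPE_MODE (unsigned_type_node);

   (see emit_block_move_via_libcall below) instead of repeating an
   #ifdef at every use.  */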
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Chain of pending expressions for PLACEHOLDER_EXPR to replace.  */
tree placeholder_list = 0;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};
/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
  PTR constfundata;
  int reverse;
};
static rtx enqueue_insn PARAMS ((rtx, rtx));
static unsigned HOST_WIDE_INT move_by_pieces_ninsns
				PARAMS ((unsigned HOST_WIDE_INT,
					 unsigned int));
static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
				      struct move_by_pieces *));
static bool block_move_libcall_safe_for_call_parm PARAMS ((void));
static bool emit_block_move_via_movstr PARAMS ((rtx, rtx, rtx, unsigned));
static rtx emit_block_move_via_libcall PARAMS ((rtx, rtx, rtx));
static tree emit_block_move_libcall_fn PARAMS ((int));
static void emit_block_move_via_loop PARAMS ((rtx, rtx, rtx, unsigned));
static rtx clear_by_pieces_1 PARAMS ((PTR, HOST_WIDE_INT,
				      enum machine_mode));
static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
				     unsigned int));
static void store_by_pieces_1 PARAMS ((struct store_by_pieces *,
				       unsigned int));
static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...),
				       enum machine_mode,
				       struct store_by_pieces *));
static bool clear_storage_via_clrstr PARAMS ((rtx, rtx, unsigned));
static rtx clear_storage_via_libcall PARAMS ((rtx, rtx));
static tree clear_storage_libcall_fn PARAMS ((int));
static rtx compress_float_constant PARAMS ((rtx, rtx));
static rtx get_subtarget PARAMS ((rtx));
static int is_zeros_p PARAMS ((tree));
static int mostly_zeros_p PARAMS ((tree));
static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
					     HOST_WIDE_INT, enum machine_mode,
					     tree, tree, int, int));
static void store_constructor PARAMS ((tree, rtx, int, HOST_WIDE_INT));
static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
				HOST_WIDE_INT, enum machine_mode,
				tree, enum machine_mode, int, tree,
				int));
static rtx var_rtx PARAMS ((tree));
static unsigned HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
static unsigned HOST_WIDE_INT highest_pow2_factor_for_type PARAMS ((tree,
								    tree));
static int is_aligning_offset PARAMS ((tree, tree));
static rtx expand_increment PARAMS ((tree, int, int));
static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
#ifdef PUSH_ROUNDING
static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
#endif
static void do_tablejump PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));
static rtx const_vector_from_tree PARAMS ((tree));
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
/* If a memory-to-memory move would take MOVE_RATIO or more simple
   move-instruction sequences, we will do a movstr or libcall instead.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)
#endif
#endif

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
#endif
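
/* For example, with the default MOVE_RATIO of 15 and a target whose
   widest integer move is 4 bytes, an aligned 32-byte copy needs 8
   simple moves and is expanded inline, while a 256-byte copy needs 64
   moves and falls through to a movstr pattern or a libcall.  The
   figures here are illustrative, not taken from any particular
   target.  */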
/* If a clear memory operation would take CLEAR_RATIO or more simple
   move-instruction sequences, we will do a clrstr or libcall instead.  */

#ifndef CLEAR_RATIO
#if defined (HAVE_clrstrqi) || defined (HAVE_clrstrhi) || defined (HAVE_clrstrsi) || defined (HAVE_clrstrdi) || defined (HAVE_clrstrti)
#define CLEAR_RATIO 2
#else
/* If we are optimizing for space, cut down the default clear ratio.  */
#define CLEAR_RATIO (optimize_size ? 3 : 15)
#endif
#endif

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
#endif
/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero, or
   to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
#endif
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    REGNO (reg) = regno;

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
	   srcmode = GET_MODE_WIDER_MODE (srcmode))
	{
	  enum insn_code ic;

	  ic = can_extend_p (mode, srcmode, 0);
	  if (ic == CODE_FOR_nothing)
	    continue;

	  PUT_MODE (mem, srcmode);

	  if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
	    float_extend_from_mem[mode][srcmode] = true;
	}
    }
}
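
/* The direct_load/direct_store tables filled in above are consulted
   below (e.g. direct_load[(int) to_mode] in convert_move and
   convert_modes) to decide whether a narrowing conversion may
   reference memory in the narrower mode directly instead of first
   copying the value into a register.  */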
/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  cfun->expr = (struct expr_status *) ggc_alloc (sizeof (struct expr_status));

  pending_chain = 0;
  pending_stack_adjust = 0;
  stack_pointer_delta = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}
/* Small sanity check that the queue is empty at the end of a function.  */

void
finish_expr_for_function ()
{
  if (pending_chain)
    abort ();
}
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
				  body, pending_chain);
  return pending_chain;
}
/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */
rtx
protect_from_queue (x, modify)
     rtx x;
     int modify;
{
  RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  rtx y = XEXP (x, 0);
	  rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));

	  if (QUEUED_INSN (y))
	    {
	      rtx temp = gen_reg_rtx (GET_MODE (x));

	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }

	  /* Copy the address into a pseudo, so that the returned value
	     remains correct across calls to emit_queue.  */
	  return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
	}

      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }

  /* If the increment has not happened, use the variable itself.  Copy it
     into a new pseudo so that the value remains correct across calls to
     emit_queue.  */
  if (QUEUED_INSN (x) == 0)
    return copy_to_reg (QUEUED_VAR (x));
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
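
/* Illustrative usage sketch (not from the original source): expansion
   code that is about to emit an insn filters each operand first,

     op0 = protect_from_queue (op0, 0);         -- read-only operand
     target = protect_from_queue (target, 1);   -- will be written
     emit_move_insn (target, op0);

   with no intervening emit_queue between the calls and the emission,
   per the warning in the comment above.  */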
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

int
queued_subexp_p (x)
     rtx x;
{
  enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
	      || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}
/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  rtx p;
  while ((p = pending_chain))
    {
      rtx body = QUEUED_BODY (p);

      switch (GET_CODE (body))
	{
	case INSN:
	case JUMP_INSN:
	case CALL_INSN:
	  QUEUED_INSN (p) = body;
	  emit_insn (body);
	  break;

#ifdef ENABLE_CHECKING
	case SEQUENCE:
	  abort ();
	  break;
#endif

	default:
	  QUEUED_INSN (p) = emit_insn (body);
	  break;
	}

      pending_chain = QUEUED_NEXT (p);
    }
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
			      : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);
  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
	abort ();

      if (VECTOR_MODE_P (to_mode))
	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (to_real != from_real)
    abort ();

  if (to_real)
    {
      rtx value, insns;

      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
	{
	  /* Try converting directly if the insn is supported.  */
	  if ((code = can_extend_p (to_mode, from_mode, 0))
	      != CODE_FOR_nothing)
	    {
	      emit_unop_insn (code, to, from, UNKNOWN);
	      return;
	    }
	}
#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctqfqf2
      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
      libcall = (rtx) 0;
      if (from_mode == SFmode && to_mode == DFmode)
	libcall = extendsfdf2_libfunc;
      else if (from_mode == SFmode && to_mode == XFmode)
	libcall = extendsfxf2_libfunc;
      else if (from_mode == SFmode && to_mode == TFmode)
	libcall = extendsftf2_libfunc;
      else if (from_mode == DFmode && to_mode == SFmode)
	libcall = truncdfsf2_libfunc;
      else if (from_mode == DFmode && to_mode == XFmode)
	libcall = extenddfxf2_libfunc;
      else if (from_mode == DFmode && to_mode == TFmode)
	libcall = extenddftf2_libfunc;
      else if (from_mode == XFmode && to_mode == SFmode)
	libcall = truncxfsf2_libfunc;
      else if (from_mode == XFmode && to_mode == DFmode)
	libcall = truncxfdf2_libfunc;
      else if (from_mode == TFmode && to_mode == SFmode)
	libcall = trunctfsf2_libfunc;
      else if (from_mode == TFmode && to_mode == DFmode)
	libcall = trunctfdf2_libfunc;

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
				       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
								    from));
      return;
    }
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }
  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  if (to_mode == PQImode)
    {
      if (from_mode != QImode)
	from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_truncqipqi2
      if (HAVE_truncqipqi2)
	{
	  emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncqipqi2 */
      abort ();
    }

  if (from_mode == PQImode)
    {
      if (to_mode != QImode)
	{
	  from = convert_to_mode (QImode, from, unsignedp);
	  from_mode = QImode;
	}
      else
	{
#ifdef HAVE_extendpqiqi2
	  if (HAVE_extendpqiqi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpqiqi2 */
	  abort ();
	}
    }

  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi2
	  if (! unsignedp && HAVE_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi2 */
#ifdef HAVE_zero_extendpsisi2
	  if (unsignedp && HAVE_zero_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_zero_extendpsisi2 */
	  abort ();
	}
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
	from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
	{
	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
	{
	  from = convert_to_mode (DImode, from, unsignedp);
	  from_mode = DImode;
	}
      else
	{
#ifdef HAVE_extendpdidi2
	  if (HAVE_extendpdidi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpdidi2 */
	  abort ();
	}
    }
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }
  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  if (flag_force_mem)
	    from = force_not_mem (from);

	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;
	  rtx tmp;
	  tree shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
				      - GET_MODE_BITSIZE (from_mode), 0);
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }
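
  /* Illustrative example (not from the original source): extending a
     QImode value to SImode when no extend insn and no usable
     intermediate mode exist becomes, for 8-bit QImode and 32-bit
     SImode,

       tmp = x << 24;
       tmp = tmp >> 24;

     where the right shift is logical when UNSIGNEDP is set and
     arithmetic otherwise, leaving the low byte zero- or
     sign-extended as requested.  */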
  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
	{
	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
	{
	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
	{
	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }
  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do
     is make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }
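
  /* Worked example (illustrative): on a host with 32-bit
     HOST_WIDE_INT, converting (const_int -1) that came from an
     unsigned 8-bit oldmode into a 64-bit mode masks VAL down to 255,
     so immed_double_const builds a constant with a zero high-order
     word instead of the all-ones pattern gen_lowpart would have
     produced.  */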
  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && (! HARD_REGISTER_P (x)
			      || HARD_REGNO_MODE_OK (REGNO (x), mode))
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ??? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return gen_int_mode (val, mode);
	}

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
/* MOVE_MAX_PIECES is the number of bytes at a time which we can
   move efficiently, as opposed to MOVE_MAX which is the maximum
   number of bytes we can move with a single instruction.  It
   determines the largest unit size that move_by_pieces can use.  */

#ifndef MOVE_MAX_PIECES
#define MOVE_MAX_PIECES   MOVE_MAX
#endif

/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
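
/* For example (illustrative figures): with MOVE_MAX_PIECES of 8 and an
   8-byte HOST_WIDE_INT, STORE_MAX_PIECES is MIN (8, 16) = 8; with a
   4-byte HOST_WIDE_INT it is MIN (8, 8) = 8, and only a target whose
   MOVE_MAX_PIECES exceeds twice the host word size would be capped.  */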
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).  The caller must pass FROM
   and TO through protect_from_queue before calling.

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum alignment we can assume.  */

void
move_by_pieces (to, from, len, align)
     rtx to, from;
     unsigned HOST_WIDE_INT len;
     unsigned int align;
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;
  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;
  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();
}
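
/* Illustrative walk-through (not from the original source): copying
   len = 15 on a fully aligned target with 8-byte MOVE_MAX_PIECES does
   one DImode move (len 15 -> 7), one SImode move (7 -> 3), one HImode
   move (3 -> 1) and one QImode move (1 -> 0), each iteration lowering
   max_size so the next narrower integer mode is selected.  */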
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (l, align)
     unsigned HOST_WIDE_INT l;
     unsigned int align;
{
  unsigned HOST_WIDE_INT n_insns = 0;
  unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  if (l)
    abort ();
  return n_insns;
}
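
/* For example (illustrative): l = 10 with generous alignment and a
   4-byte MOVE_MAX counts 10/4 = 2 SImode insns with remainder 2, then
   one HImode insn, i.e. 3 in total; MOVE_BY_PIECES_P compares this
   count against MOVE_RATIO to choose the inline expansion.  */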
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PARAMS ((rtx, ...));
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->to)
	{
	  if (data->autinc_to)
	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
					     data->offset);
	  else
	    to1 = adjust_address (data->to, mode, data->offset);
	}

      if (data->autinc_from)
	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
					   data->offset);
      else
	from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
	emit_insn ((*genfun) (to1, from1));
      else
	{
#ifdef PUSH_ROUNDING
	  emit_single_push_insn (mode, from1, NULL);
#else
	  abort ();
#endif
	}

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (x, y, size, method)
     rtx x, y, size;
     enum block_op_methods method;
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
	 to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      abort ();
    }

  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  if (GET_MODE (x) != BLKmode)
    abort ();
  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (GET_CODE (size) == CONST_INT)
    {
      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, size);
      set_mem_size (y, size);
    }

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align);
  else if (emit_block_move_via_movstr (x, y, size, align))
    ;
  else if (may_use_call)
    retval = emit_block_move_via_libcall (x, y, size);
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}
/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm ()
{
  if (PUSH_ARGS)
    return true;
  else
    {
      /* Check to see whether memcpy takes all register arguments.  */
      static enum {
	takes_regs_uninit, takes_regs_no, takes_regs_yes
      } takes_regs = takes_regs_uninit;

      switch (takes_regs)
	{
	case takes_regs_uninit:
	  {
	    CUMULATIVE_ARGS args_so_far;
	    tree fn, arg;

	    fn = emit_block_move_libcall_fn (false);
	    INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0);

	    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
	    for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
	      {
		enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
		rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
		if (!tmp || !REG_P (tmp))
		  goto fail_takes_regs;
#ifdef FUNCTION_ARG_PARTIAL_NREGS
		if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
						NULL_TREE, 1))
		  goto fail_takes_regs;
#endif
		FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
	      }
	  }
	  takes_regs = takes_regs_yes;
	  /* FALLTHRU */

	case takes_regs_yes:
	  return true;

	fail_takes_regs:
	  takes_regs = takes_regs_no;
	  /* FALLTHRU */
	case takes_regs_no:
	  return false;

	default:
	  abort ();
	}
    }
}
/* A subroutine of emit_block_move.  Expand a movstr pattern;
   return true if successful.  */

static bool
emit_block_move_via_movstr (x, y, size, align)
     rtx x, y, size;
     unsigned int align;
{
  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  enum machine_mode mode;

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = movstr_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
	     here because if SIZE is less than the mode mask, as it is
	     returned by the macro, it will definitely be less than the
	     actual mode mask.  */
	  && ((GET_CODE (size) == CONST_INT
	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
		   <= (GET_MODE_MASK (mode) >> 1)))
	      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
	      || (*pred) (x, BLKmode))
	  && ((pred = insn_data[(int) code].operand[1].predicate) == 0
	      || (*pred) (y, BLKmode))
	  && ((pred = insn_data[(int) code].operand[3].predicate) == 0
	      || (*pred) (opalign, VOIDmode)))
	{
	  rtx op2;
	  rtx last = get_last_insn ();
	  rtx pat;

	  op2 = convert_to_mode (mode, size, 1);
	  pred = insn_data[(int) code].operand[2].predicate;
	  if (pred != 0 && ! (*pred) (op2, mode))
	    op2 = copy_to_mode_reg (mode, op2);

	  /* ??? When called via emit_block_move_for_call, it'd be
	     nice if there were some way to inform the backend, so
	     that it doesn't fail the expansion because it thinks
	     emitting the libcall would be more efficient.  */

	  pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	  if (pat)
	    {
	      emit_insn (pat);
	      volatile_ok = 0;
	      return true;
	    }
	  else
	    delete_insns_since (last);
	}
    }

  volatile_ok = 0;
  return false;
}
/* A subroutine of emit_block_move.  Expand a call to memcpy or bcopy.
   Return the return value from memcpy, 0 otherwise.  */

static rtx
emit_block_move_via_libcall (dst, src, size)
     rtx dst, src, size;
{
  rtx dst_addr, src_addr;
  tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* DST, SRC, or SIZE may have been passed through protect_from_queue.

     It is unsafe to save the value generated by protect_from_queue and reuse
     it later.  Consider what happens if emit_queue is called before the
     return value from protect_from_queue is used.

     Expansion of the CALL_EXPR below will call emit_queue before we are
     finished emitting RTL for argument setup.  So if we are not careful we
     could get the wrong value for an argument.

     To avoid this problem we go ahead and emit code to copy the addresses of
     DST and SRC and SIZE into new pseudos.  We can then place those new
     pseudos into an RTL_EXPR and use them later, even after a call to
     emit_queue.

     Note this is not strictly needed for library calls since they do not call
     emit_queue before loading their arguments.  However, we may need to have
     library calls call emit_queue in the future since failing to do so could
     cause problems for targets which define SMALL_REGISTER_CLASSES and pass
     arguments in registers.  */

  dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));

#ifdef POINTERS_EXTEND_UNSIGNED
  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);
#endif

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  if (TARGET_MEM_FUNCTIONS)
    size_mode = TYPE_MODE (sizetype);
  else
    size_mode = TYPE_MODE (unsigned_type_node);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.

     For convenience, we generate the call to bcopy this way as well.  */

  if (TARGET_MEM_FUNCTIONS)
    size_tree = make_tree (sizetype, size);
  else
    size_tree = make_tree (unsigned_type_node, size);

  fn = emit_block_move_libcall_fn (true);
  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
  if (TARGET_MEM_FUNCTIONS)
    {
      arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
      arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
    }
  else
    {
      arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
      arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
    }

  /* Now we have to build up the CALL_EXPR itself.  */
  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
  call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
		     call_expr, arg_list, NULL_TREE);
  TREE_SIDE_EFFECTS (call_expr) = 1;

  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);

  /* If we are initializing a readonly value, show the above call clobbered
     it.  Otherwise, a load from it may erroneously be hoisted from a loop, or
     the delay slot scheduler might overlook conflicts and take nasty
     decisions.  */
  if (RTX_UNCHANGING_P (dst))
    add_function_usage_to
      (last_call_insn (), gen_rtx_EXPR_LIST (VOIDmode,
					     gen_rtx_CLOBBER (VOIDmode, dst),
					     NULL_RTX));

  return TARGET_MEM_FUNCTIONS ? retval : NULL_RTX;
}
/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_move_fn;

void
init_block_move_fn (asmspec)
     const char *asmspec;
{
  if (!block_move_fn)
    {
      tree args, fn;

      if (TARGET_MEM_FUNCTIONS)
	{
	  fn = get_identifier ("memcpy");
	  args = build_function_type_list (ptr_type_node, ptr_type_node,
					   const_ptr_type_node, sizetype,
					   NULL_TREE);
	}
      else
	{
	  fn = get_identifier ("bcopy");
	  args = build_function_type_list (void_type_node, const_ptr_type_node,
					   ptr_type_node, unsigned_type_node,
					   NULL_TREE);
	}

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;

      block_move_fn = fn;
    }

  if (asmspec)
    {
      SET_DECL_RTL (block_move_fn, NULL_RTX);
      SET_DECL_ASSEMBLER_NAME (block_move_fn, get_identifier (asmspec));
    }
}
static tree
emit_block_move_libcall_fn (for_call)
     int for_call;
{
  static bool emitted_extern;

  if (!block_move_fn)
    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_move_fn, NULL);
      assemble_external (block_move_fn);
    }

  return block_move_fn;
}
/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

static void
emit_block_move_via_loop (x, y, size, align)
     rtx x, y, size;
     unsigned int align ATTRIBUTE_UNUSED;
{
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_note (NULL, NOTE_INSN_LOOP_BEG);

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (Pmode, iter_mode, iter, true);
  x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
  y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
			     true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_note (NULL, NOTE_INSN_LOOP_CONT);
  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
			   true, top_label);

  emit_note (NULL, NOTE_INSN_LOOP_END);
}
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  SIZE indicates the number
   of bytes in the object X.  */

void
move_block_from_reg (regno, x, nregs, size)
     int regno;
     rtx x;
     int nregs;
     int size;
{
  int i;
#ifdef HAVE_store_multiple
  rtx pat;
  rtx last;
#endif
  enum machine_mode mode;

  if (nregs == 0)
    return;

  /* If SIZE is that of a mode no bigger than a word, just use that
     mode's store operation.  */
  if (size <= UNITS_PER_WORD
      && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
    {
      emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno));
      return;
    }

  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
     to the left before storing to memory.  Note that the previous test
     doesn't handle all cases (e.g. SIZE == 3).  */
  if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
    {
      rtx tem = operand_subword (x, 0, 1, BLKmode);
      rtx shift;

      if (tem == 0)
	abort ();

      shift = expand_shift (LSHIFT_EXPR, word_mode,
			    gen_rtx_REG (word_mode, regno),
			    build_int_2 ((UNITS_PER_WORD - size)
					 * BITS_PER_UNIT, 0), NULL_RTX, 0);
      emit_move_insn (tem, shift);
      return;
    }

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      last = get_last_insn ();
      pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
				GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
	abort ();

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

rtx
gen_group_rtx (orig)
     rtx orig;
{
  int i, length;
  rtx *tmps;

  if (GET_CODE (orig) != PARALLEL)
    abort ();

  length = XVECLEN (orig, 0);
  tmps = (rtx *) alloca (sizeof (rtx) * length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  if (i)
    tmps[0] = 0;

  for (; i < length; i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
    }

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}
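
/* For illustration (not from the original source), such a group might
   be

     (parallel [(expr_list (reg:DI 100) (const_int 0))
		(expr_list (reg:DI 101) (const_int 8))])

   describing a 16-byte block whose first eight bytes live in pseudo
   100 and whose second eight bytes live in pseudo 101; the second
   operand of each EXPR_LIST gives the byte offset within the block.  */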
2277 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
2278 registers represented by a PARALLEL. SSIZE represents the total size of
2279 block SRC in bytes, or -1 if not known. */
2280 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
2281 the balance will be in what would be the low-order memory addresses, i.e.
2282 left justified for big endian, right justified for little endian. This
2283 happens to be true for the targets currently using this support. If this
2284 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
2288 emit_group_load (dst, orig_src, ssize)
2295 if (GET_CODE (dst) != PARALLEL)
2298 /* Check for a NULL entry, used to indicate that the parameter goes
2299 both on the stack and in registers. */
2300 if (XEXP (XVECEXP (dst, 0, 0), 0))
2305 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
2307 /* Process the pieces. */
2308 for (i = start; i < XVECLEN (dst, 0); i++)
2310 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
2311 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
2312 unsigned int bytelen = GET_MODE_SIZE (mode);
2315 /* Handle trailing fragments that run over the size of the struct. */
2316 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2318 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2319 bytelen = ssize - bytepos;
2324 /* If we won't be loading directly from memory, protect the real source
2325 from strange tricks we might play; but make sure that the source can
2326 be loaded directly into the destination. */
2328 if (GET_CODE (orig_src) != MEM
2329 && (!CONSTANT_P (orig_src)
2330 || (GET_MODE (orig_src) != mode
2331 && GET_MODE (orig_src) != VOIDmode)))
2333 if (GET_MODE (orig_src) == VOIDmode)
2334 src = gen_reg_rtx (mode);
2336 src = gen_reg_rtx (GET_MODE (orig_src));
2338 emit_move_insn (src, orig_src);
2341 /* Optimize the access just a bit. */
2342 if (GET_CODE (src) == MEM
2343 && MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode)
2344 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2345 && bytelen == GET_MODE_SIZE (mode))
2347 tmps[i] = gen_reg_rtx (mode);
2348 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
2350 else if (GET_CODE (src) == CONCAT)
2352 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
2353 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
2355 if ((bytepos == 0 && bytelen == slen0)
2356 || (bytepos != 0 && bytepos + bytelen <= slen))
2358 /* The following assumes that the concatenated objects all
2359 have the same size. In this case, a simple calculation
2360 can be used to determine the object and the bit field to be extracted. */
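	      /* Worked example (added): for a DCmode CONCAT of two
		 DFmode halves, slen0 == 8, so a piece at bytepos 8
		 selects XEXP (src, 8 / 8) == XEXP (src, 1) at bit
		 offset (8 % 8) * BITS_PER_UNIT == 0 within it.  */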
2362 tmps[i] = XEXP (src, bytepos / slen0);
2363 if (! CONSTANT_P (tmps[i])
2364 && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
2365 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
2366 (bytepos % slen0) * BITS_PER_UNIT,
2367 1, NULL_RTX, mode, mode, ssize);
2369 else if (bytepos == 0)
2371 rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
2372 emit_move_insn (mem, src);
2373 tmps[i] = adjust_address (mem, mode, 0);
2378 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
2379 SIMD register, which is currently broken. While we get GCC
2380 to emit proper RTL for these cases, let's dump to memory. */
2381 else if (VECTOR_MODE_P (GET_MODE (dst))
2382 && GET_CODE (src) == REG)
2384 int slen = GET_MODE_SIZE (GET_MODE (src));
2387 mem = assign_stack_temp (GET_MODE (src), slen, 0);
2388 emit_move_insn (mem, src);
2389 tmps[i] = adjust_address (mem, mode, (int) bytepos);
2391 else if (CONSTANT_P (src)
2392 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2395 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2396 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2399 if (BYTES_BIG_ENDIAN && shift)
2400 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2401 tmps[i], 0, OPTAB_WIDEN);
2406 /* Copy the extracted pieces into the proper (probable) hard regs. */
2407 for (i = start; i < XVECLEN (dst, 0); i++)
2408 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
2411 /* Emit code to move a block SRC to block DST, where SRC and DST are
2412 non-consecutive groups of registers, each represented by a PARALLEL. */
2415 emit_group_move (dst, src)
2420 if (GET_CODE (src) != PARALLEL
2421 || GET_CODE (dst) != PARALLEL
2422 || XVECLEN (src, 0) != XVECLEN (dst, 0))
2425 /* Skip first entry if NULL. */
2426 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
2427 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
2428 XEXP (XVECEXP (src, 0, i), 0));
2431 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2432 registers represented by a PARALLEL. SSIZE represents the total size of
2433 block DST, or -1 if not known. */
2436 emit_group_store (orig_dst, src, ssize)
2443 if (GET_CODE (src) != PARALLEL)
2446 /* Check for a NULL entry, used to indicate that the parameter goes
2447 both on the stack and in registers. */
2448 if (XEXP (XVECEXP (src, 0, 0), 0))
2453 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2455 /* Copy the (probable) hard regs into pseudos. */
2456 for (i = start; i < XVECLEN (src, 0); i++)
2458 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2459 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2460 emit_move_insn (tmps[i], reg);
2464 /* If we won't be storing directly into memory, protect the real destination
2465 from strange tricks we might play. */
2467 if (GET_CODE (dst) == PARALLEL)
2471 /* We can get a PARALLEL dst if there is a conditional expression in
2472 a return statement. In that case, the dst and src are the same,
2473 so no action is necessary. */
2474 if (rtx_equal_p (dst, src))
2477 /* It is unclear if we can ever reach here, but we may as well handle
2478 it. Allocate a temporary, and split this into a store/load to/from
2481 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2482 emit_group_store (temp, src, ssize);
2483 emit_group_load (dst, temp, ssize);
2486 else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
2488 dst = gen_reg_rtx (GET_MODE (orig_dst));
2489 /* Make life a bit easier for combine. */
2490 emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
2493 /* Process the pieces. */
2494 for (i = start; i < XVECLEN (src, 0); i++)
2496 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2497 enum machine_mode mode = GET_MODE (tmps[i]);
2498 unsigned int bytelen = GET_MODE_SIZE (mode);
2501 /* Handle trailing fragments that run over the size of the struct. */
2502 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2504 if (BYTES_BIG_ENDIAN)
2506 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2507 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2508 tmps[i], 0, OPTAB_WIDEN);
2510 bytelen = ssize - bytepos;
2513 if (GET_CODE (dst) == CONCAT)
2515 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2516 dest = XEXP (dst, 0);
2517 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2519 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2520 dest = XEXP (dst, 1);
2522 else if (bytepos == 0 && XVECLEN (src, 0))
2524 dest = assign_stack_temp (GET_MODE (dest),
2525 GET_MODE_SIZE (GET_MODE (dest)), 0);
2526 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2535 /* Optimize the access just a bit. */
2536 if (GET_CODE (dest) == MEM
2537 && MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode)
2538 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2539 && bytelen == GET_MODE_SIZE (mode))
2540 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2542 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2543 mode, tmps[i], ssize);
2548 /* Copy from the pseudo into the (probable) hard reg. */
2549 if (orig_dst != dst)
2550 emit_move_insn (orig_dst, dst);
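/* Illustration (added): using emit_group_load and emit_group_store
   together.  With GROUP the two-register PARALLEL sketched after
   gen_group_rtx, a 16-byte block can be moved through the registers:

	emit_group_load (group, src_mem, 16);
	...
	emit_group_store (dst_mem, group, 16);

   SRC_MEM and DST_MEM are hypothetical BLKmode MEMs of size 16.  */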
2553 /* Generate code to copy a BLKmode object of TYPE out of a
2554 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2555 is null, a stack temporary is created. TGTBLK is returned.
2557 The primary purpose of this routine is to handle functions
2558 that return BLKmode structures in registers. Some machines
2559 (the PA for example) want to return all small structures
2560 in registers regardless of the structure's alignment. */
2563 copy_blkmode_from_reg (tgtblk, srcreg, type)
2568 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2569 rtx src = NULL, dst = NULL;
2570 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2571 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2575 tgtblk = assign_temp (build_qualified_type (type,
2577 | TYPE_QUAL_CONST)),
2579 preserve_temp_slots (tgtblk);
2582 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2583 into a new pseudo which is a full word. */
2585 if (GET_MODE (srcreg) != BLKmode
2586 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2587 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2589 /* Structures whose size is not a multiple of a word are aligned
2590 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2591 machine, this means we must skip the empty high order bytes when
2592 calculating the bit offset. */
2593 if (BYTES_BIG_ENDIAN
2594 && bytes % UNITS_PER_WORD)
2595 big_endian_correction
2596 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
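  /* Worked example (added): with BITS_PER_WORD == 32, a 3-byte
     structure leaves one empty high-order byte in its register image,
     so big_endian_correction = 32 - 3 * 8 = 8 and the extraction
     below starts 8 bits into the first word.  */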
2598 /* Copy the structure BITSIZE bits at a time.
2600 We could probably emit more efficient code for machines which do not use
2601 strict alignment, but it doesn't seem worth the effort at the current time. */
2603 for (bitpos = 0, xbitpos = big_endian_correction;
2604 bitpos < bytes * BITS_PER_UNIT;
2605 bitpos += bitsize, xbitpos += bitsize)
2607 /* We need a new source operand each time xbitpos is on a
2608 word boundary and when xbitpos == big_endian_correction
2609 (the first time through). */
2610 if (xbitpos % BITS_PER_WORD == 0
2611 || xbitpos == big_endian_correction)
2612 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2615 /* We need a new destination operand each time bitpos is on
2617 if (bitpos % BITS_PER_WORD == 0)
2618 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2620 /* Use xbitpos for the source extraction (right justified) and
2621 bitpos for the destination store (left justified). */
2622 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2623 extract_bit_field (src, bitsize,
2624 xbitpos % BITS_PER_WORD, 1,
2625 NULL_RTX, word_mode, word_mode,
2633 /* Add a USE expression for REG to the (possibly empty) list pointed
2634 to by CALL_FUSAGE. REG must denote a hard register. */
2637 use_reg (call_fusage, reg)
2638 rtx *call_fusage, reg;
2640 if (GET_CODE (reg) != REG
2641 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2645 = gen_rtx_EXPR_LIST (VOIDmode,
2646 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2649 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2650 starting at REGNO. All of these registers must be hard registers. */
2653 use_regs (call_fusage, regno, nregs)
2660 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2663 for (i = 0; i < nregs; i++)
2664 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2667 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2668 PARALLEL REGS. This is for calls that pass values in multiple
2669 non-contiguous locations. The Irix 6 ABI has examples of this. */
2672 use_group_regs (call_fusage, regs)
2678 for (i = 0; i < XVECLEN (regs, 0); i++)
2680 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2682 /* A NULL entry means the parameter goes both on the stack and in
2683 registers. This can also be a MEM for targets that pass values
2684 partially on the stack and partially in registers. */
2685 if (reg != 0 && GET_CODE (reg) == REG)
2686 use_reg (call_fusage, reg);
2691 /* Determine whether the LEN bytes generated by CONSTFUN can be
2692 stored to memory using several move instructions. CONSTFUNDATA is
2693 a pointer which will be passed as argument in every CONSTFUN call.
2694 ALIGN is maximum alignment we can assume. Return nonzero if a
2695 call to store_by_pieces should succeed. */
2698 can_store_by_pieces (len, constfun, constfundata, align)
2699 unsigned HOST_WIDE_INT len;
2700 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2704 unsigned HOST_WIDE_INT max_size, l;
2705 HOST_WIDE_INT offset = 0;
2706 enum machine_mode mode, tmode;
2707 enum insn_code icode;
2711 if (! STORE_BY_PIECES_P (len, align))
2714 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2715 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2716 align = MOVE_MAX * BITS_PER_UNIT;
2718 /* We would first store what we can in the largest integer mode, then go to
2719 successively smaller modes. */
2722 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2727 max_size = STORE_MAX_PIECES + 1;
2728 while (max_size > 1)
2730 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2731 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2732 if (GET_MODE_SIZE (tmode) < max_size)
2735 if (mode == VOIDmode)
2738 icode = mov_optab->handlers[(int) mode].insn_code;
2739 if (icode != CODE_FOR_nothing
2740 && align >= GET_MODE_ALIGNMENT (mode))
2742 unsigned int size = GET_MODE_SIZE (mode);
2749 cst = (*constfun) (constfundata, offset, mode);
2750 if (!LEGITIMATE_CONSTANT_P (cst))
2760 max_size = GET_MODE_SIZE (mode);
2763 /* The code above should have handled everything. */
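/* Hypothetical usage sketch (added; the names are illustrative, not
   from this file): a caller expanding a copy of a known constant
   string might probe first and only then commit:

	if (can_store_by_pieces (len, read_str_fn, (PTR) str, align))
	  store_by_pieces (dest_mem, len, read_str_fn, (PTR) str, align);

   where read_str_fn is a CONSTFUN returning an rtx for the MODE-sized
   chunk of the constant found at the given OFFSET.  */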
2771 /* Generate several move instructions to store LEN bytes generated by
2772 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2773 pointer which will be passed as argument in every CONSTFUN call.
2774 ALIGN is maximum alignment we can assume. */
2777 store_by_pieces (to, len, constfun, constfundata, align)
2779 unsigned HOST_WIDE_INT len;
2780 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2784 struct store_by_pieces data;
2786 if (! STORE_BY_PIECES_P (len, align))
2788 to = protect_from_queue (to, 1);
2789 data.constfun = constfun;
2790 data.constfundata = constfundata;
2793 store_by_pieces_1 (&data, align);
2796 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2797 rtx with BLKmode). The caller must pass TO through protect_from_queue
2798 before calling. ALIGN is maximum alignment we can assume. */
2801 clear_by_pieces (to, len, align)
2803 unsigned HOST_WIDE_INT len;
2806 struct store_by_pieces data;
2808 data.constfun = clear_by_pieces_1;
2809 data.constfundata = NULL;
2812 store_by_pieces_1 (&data, align);
2815 /* Callback routine for clear_by_pieces.
2816 Return const0_rtx unconditionally. */
2819 clear_by_pieces_1 (data, offset, mode)
2820 PTR data ATTRIBUTE_UNUSED;
2821 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2822 enum machine_mode mode ATTRIBUTE_UNUSED;
2827 /* Subroutine of clear_by_pieces and store_by_pieces.
2828 Generate several move instructions to store LEN bytes of block TO. (A MEM
2829 rtx with BLKmode). The caller must pass TO through protect_from_queue
2830 before calling. ALIGN is maximum alignment we can assume. */
2833 store_by_pieces_1 (data, align)
2834 struct store_by_pieces *data;
2837 rtx to_addr = XEXP (data->to, 0);
2838 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
2839 enum machine_mode mode = VOIDmode, tmode;
2840 enum insn_code icode;
2843 data->to_addr = to_addr;
2845 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2846 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2848 data->explicit_inc_to = 0;
2850 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2852 data->offset = data->len;
2854 /* If storing requires more than two move insns,
2855 copy addresses to registers (to make displacements shorter)
2856 and use post-increment if available. */
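  /* Clarifying note (added): "use post-increment" here means copying
     TO_ADDR into a register and bumping it with explicit add insns
     around each store (see store_by_pieces_2); the target may later
     fuse those adds into real auto-increment addressing.  */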
2857 if (!data->autinc_to
2858 && move_by_pieces_ninsns (data->len, align) > 2)
2860 /* Determine the main mode we'll be using. */
2861 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2862 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2863 if (GET_MODE_SIZE (tmode) < max_size)
2866 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2868 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2869 data->autinc_to = 1;
2870 data->explicit_inc_to = -1;
2873 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2874 && ! data->autinc_to)
2876 data->to_addr = copy_addr_to_reg (to_addr);
2877 data->autinc_to = 1;
2878 data->explicit_inc_to = 1;
2881 if (!data->autinc_to && CONSTANT_P (to_addr))
2882 data->to_addr = copy_addr_to_reg (to_addr);
2885 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2886 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2887 align = MOVE_MAX * BITS_PER_UNIT;
2889 /* First store what we can in the largest integer mode, then go to
2890 successively smaller modes. */
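  /* Worked example (added): with 4-byte words and len == 7, the loop
     below emits one SImode store (4 bytes), then one HImode store
     (2 bytes), then one QImode store (1 byte).  */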
2892 while (max_size > 1)
2894 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2895 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2896 if (GET_MODE_SIZE (tmode) < max_size)
2899 if (mode == VOIDmode)
2902 icode = mov_optab->handlers[(int) mode].insn_code;
2903 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2904 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2906 max_size = GET_MODE_SIZE (mode);
2909 /* The code above should have handled everything. */
2914 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2915 with move instructions for mode MODE. GENFUN is the gen_... function
2916 to make a move insn for that mode. DATA has all the other info. */
2919 store_by_pieces_2 (genfun, mode, data)
2920 rtx (*genfun) PARAMS ((rtx, ...));
2921 enum machine_mode mode;
2922 struct store_by_pieces *data;
2924 unsigned int size = GET_MODE_SIZE (mode);
2927 while (data->len >= size)
2930 data->offset -= size;
2932 if (data->autinc_to)
2933 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2936 to1 = adjust_address (data->to, mode, data->offset);
2938 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2939 emit_insn (gen_add2_insn (data->to_addr,
2940 GEN_INT (-(HOST_WIDE_INT) size)));
2942 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2943 emit_insn ((*genfun) (to1, cst));
2945 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2946 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2948 if (! data->reverse)
2949 data->offset += size;
2955 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2956 its length in bytes. */
2959 clear_storage (object, size)
2964 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2965 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2967 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2968 just move a zero. Otherwise, do this a piece at a time. */
2969 if (GET_MODE (object) != BLKmode
2970 && GET_CODE (size) == CONST_INT
2971 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
2972 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2975 object = protect_from_queue (object, 1);
2976 size = protect_from_queue (size, 0);
2978 if (GET_CODE (size) == CONST_INT
2979 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2980 clear_by_pieces (object, INTVAL (size), align);
2981 else if (clear_storage_via_clrstr (object, size, align))
2984 retval = clear_storage_via_libcall (object, size);
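/* Illustration (added): zeroing a 32-byte BLKmode stack temporary
   through the routine above:

	rtx obj = assign_stack_temp (BLKmode, 32, 0);
	clear_storage (obj, GEN_INT (32));

   clear_storage picks by-pieces, clrstr, or a libcall as needed.  */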
2990 /* A subroutine of clear_storage. Expand a clrstr pattern;
2991 return true if successful. */
2994 clear_storage_via_clrstr (object, size, align)
2998 /* Try the most limited insn first, because there's no point
2999 including more than one in the machine description unless
3000 the more limited one has some advantage. */
3002 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3003 enum machine_mode mode;
3005 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
3006 mode = GET_MODE_WIDER_MODE (mode))
3008 enum insn_code code = clrstr_optab[(int) mode];
3009 insn_operand_predicate_fn pred;
3011 if (code != CODE_FOR_nothing
3012 /* We don't need MODE to be narrower than
3013 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
3014 the mode mask, as it is returned by the macro, it will
3015 definitely be less than the actual mode mask. */
3016 && ((GET_CODE (size) == CONST_INT
3017 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3018 <= (GET_MODE_MASK (mode) >> 1)))
3019 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3020 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
3021 || (*pred) (object, BLKmode))
3022 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
3023 || (*pred) (opalign, VOIDmode)))
3026 rtx last = get_last_insn ();
3029 op1 = convert_to_mode (mode, size, 1);
3030 pred = insn_data[(int) code].operand[1].predicate;
3031 if (pred != 0 && ! (*pred) (op1, mode))
3032 op1 = copy_to_mode_reg (mode, op1);
3034 pat = GEN_FCN ((int) code) (object, op1, opalign);
3041 delete_insns_since (last);
3048 /* A subroutine of clear_storage. Expand a call to memset or bzero.
3049 Return the return value of memset, 0 otherwise. */
3052 clear_storage_via_libcall (object, size)
3055 tree call_expr, arg_list, fn, object_tree, size_tree;
3056 enum machine_mode size_mode;
3059 /* OBJECT or SIZE may have been passed through protect_from_queue.
3061 It is unsafe to save the value generated by protect_from_queue
3062 and reuse it later. Consider what happens if emit_queue is
3063 called before the return value from protect_from_queue is used.
3065 Expansion of the CALL_EXPR below will call emit_queue before
3066 we are finished emitting RTL for argument setup. So if we are
3067 not careful we could get the wrong value for an argument.
3069 To avoid this problem we go ahead and emit code to copy OBJECT
3070 and SIZE into new pseudos. We can then place those new pseudos
3071 into an RTL_EXPR and use them later, even after a call to
3074 Note this is not strictly needed for library calls since they
3075 do not call emit_queue before loading their arguments. However,
3076 we may need to have library calls call emit_queue in the future
3077 since failing to do so could cause problems for targets which
3078 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
3080 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
3082 if (TARGET_MEM_FUNCTIONS)
3083 size_mode = TYPE_MODE (sizetype);
3085 size_mode = TYPE_MODE (unsigned_type_node);
3086 size = convert_to_mode (size_mode, size, 1);
3087 size = copy_to_mode_reg (size_mode, size);
3089 /* It is incorrect to use the libcall calling conventions to call
3090 memset in this context. This could be a user call to memset and
3091 the user may wish to examine the return value from memset. For
3092 targets where libcalls and normal calls have different conventions
3093 for returning pointers, we could end up generating incorrect code.
3095 For convenience, we generate the call to bzero this way as well. */
3097 object_tree = make_tree (ptr_type_node, object);
3098 if (TARGET_MEM_FUNCTIONS)
3099 size_tree = make_tree (sizetype, size);
3101 size_tree = make_tree (unsigned_type_node, size);
3103 fn = clear_storage_libcall_fn (true);
3104 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
3105 if (TARGET_MEM_FUNCTIONS)
3106 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
3107 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
3109 /* Now we have to build up the CALL_EXPR itself. */
3110 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
3111 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
3112 call_expr, arg_list, NULL_TREE);
3113 TREE_SIDE_EFFECTS (call_expr) = 1;
3115 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
3117 /* If we are initializing a readonly value, show the above call
3118 clobbered it. Otherwise, a load from it may erroneously be
3119 hoisted from a loop. */
3120 if (RTX_UNCHANGING_P (object))
3121 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
3123 return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
3126 /* A subroutine of clear_storage_via_libcall. Create the tree node
3127 for the function we use for block clears. The first time FOR_CALL
3128 is true, we call assemble_external. */
3130 static GTY(()) tree block_clear_fn;
3133 init_block_clear_fn (asmspec)
3134 const char *asmspec;
3136 if (!block_clear_fn)
3140 if (TARGET_MEM_FUNCTIONS)
3142 fn = get_identifier ("memset");
3143 args = build_function_type_list (ptr_type_node, ptr_type_node,
3144 integer_type_node, sizetype,
3149 fn = get_identifier ("bzero");
3150 args = build_function_type_list (void_type_node, ptr_type_node,
3151 unsigned_type_node, NULL_TREE);
3154 fn = build_decl (FUNCTION_DECL, fn, args);
3155 DECL_EXTERNAL (fn) = 1;
3156 TREE_PUBLIC (fn) = 1;
3157 DECL_ARTIFICIAL (fn) = 1;
3158 TREE_NOTHROW (fn) = 1;
3160 block_clear_fn = fn;
3165 SET_DECL_RTL (block_clear_fn, NULL_RTX);
3166 SET_DECL_ASSEMBLER_NAME (block_clear_fn, get_identifier (asmspec));
3171 clear_storage_libcall_fn (for_call)
3174 static bool emitted_extern;
3176 if (!block_clear_fn)
3177 init_block_clear_fn (NULL);
3179 if (for_call && !emitted_extern)
3181 emitted_extern = true;
3182 make_decl_rtl (block_clear_fn, NULL);
3183 assemble_external (block_clear_fn);
3186 return block_clear_fn;
3189 /* Generate code to copy Y into X.
3190 Both Y and X must have the same mode, except that
3191 Y can be a constant with VOIDmode.
3192 This mode cannot be BLKmode; use emit_block_move for that.
3194 Return the last instruction emitted. */
3197 emit_move_insn (x, y)
3200 enum machine_mode mode = GET_MODE (x);
3201 rtx y_cst = NULL_RTX;
3204 x = protect_from_queue (x, 1);
3205 y = protect_from_queue (y, 0);
3207 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
3210 /* Never force constant_p_rtx to memory. */
3211 if (GET_CODE (y) == CONSTANT_P_RTX)
3213 else if (CONSTANT_P (y))
3216 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3217 && (last_insn = compress_float_constant (x, y)))
3220 if (!LEGITIMATE_CONSTANT_P (y))
3223 y = force_const_mem (mode, y);
3225 /* If the target's cannot_force_const_mem prevented the spill,
3226 assume that the target's move expanders will also take care
3227 of the non-legitimate constant. */
3233 /* If X or Y are memory references, verify that their addresses are valid for the machine. */
3235 if (GET_CODE (x) == MEM
3236 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3237 && ! push_operand (x, GET_MODE (x)))
3239 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
3240 x = validize_mem (x);
3242 if (GET_CODE (y) == MEM
3243 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3245 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
3246 y = validize_mem (y);
3248 if (mode == BLKmode)
3251 last_insn = emit_move_insn_1 (x, y);
3253 if (y_cst && GET_CODE (x) == REG)
3254 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
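/* Illustration (added): a minimal emit_move_insn use; the constant is
   legitimized automatically (e.g. forced into the constant pool) if
   the target cannot move it directly:

	rtx reg = gen_reg_rtx (SImode);
	emit_move_insn (reg, GEN_INT (42));

   The insn emitted is returned, so callers can attach notes to it.  */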
3259 /* Low level part of emit_move_insn.
3260 Called just like emit_move_insn, but assumes X and Y
3261 are basically valid. */
3264 emit_move_insn_1 (x, y)
3267 enum machine_mode mode = GET_MODE (x);
3268 enum machine_mode submode;
3269 enum mode_class class = GET_MODE_CLASS (mode);
3271 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
3274 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
3276 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
3278 /* Expand complex moves by moving real part and imag part, if possible. */
3279 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
3280 && BLKmode != (submode = GET_MODE_INNER (mode))
3281 && (mov_optab->handlers[(int) submode].insn_code
3282 != CODE_FOR_nothing))
3284 /* Don't split destination if it is a stack push. */
3285 int stack = push_operand (x, GET_MODE (x));
3287 #ifdef PUSH_ROUNDING
3288 /* In case we output to the stack, but the size is smaller than the
3289 machine can push exactly, we need to use move instructions. */
3291 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
3292 != GET_MODE_SIZE (submode)))
3295 HOST_WIDE_INT offset1, offset2;
3297 /* Do not use anti_adjust_stack, since we don't want to update
3298 stack_pointer_delta. */
3299 temp = expand_binop (Pmode,
3300 #ifdef STACK_GROWS_DOWNWARD
3308 (GET_MODE_SIZE (GET_MODE (x)))),
3309 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3311 if (temp != stack_pointer_rtx)
3312 emit_move_insn (stack_pointer_rtx, temp);
3314 #ifdef STACK_GROWS_DOWNWARD
3316 offset2 = GET_MODE_SIZE (submode);
3318 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
3319 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
3320 + GET_MODE_SIZE (submode));
3323 emit_move_insn (change_address (x, submode,
3324 gen_rtx_PLUS (Pmode,
3326 GEN_INT (offset1))),
3327 gen_realpart (submode, y));
3328 emit_move_insn (change_address (x, submode,
3329 gen_rtx_PLUS (Pmode,
3331 GEN_INT (offset2))),
3332 gen_imagpart (submode, y));
3336 /* If this is a stack push, push the highpart first, so it
3337 will be in the argument order.
3339 In that case, change_address is used only to convert
3340 the mode, not to change the address. */
3343 /* Note that the real part always precedes the imag part in memory
3344 regardless of machine's endianness. */
3345 #ifdef STACK_GROWS_DOWNWARD
3346 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3347 (gen_rtx_MEM (submode, XEXP (x, 0)),
3348 gen_imagpart (submode, y)));
3349 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3350 (gen_rtx_MEM (submode, XEXP (x, 0)),
3351 gen_realpart (submode, y)));
3353 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3354 (gen_rtx_MEM (submode, XEXP (x, 0)),
3355 gen_realpart (submode, y)));
3356 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3357 (gen_rtx_MEM (submode, XEXP (x, 0)),
3358 gen_imagpart (submode, y)));
3363 rtx realpart_x, realpart_y;
3364 rtx imagpart_x, imagpart_y;
3366 /* If this is a complex value with each part being smaller than a
3367 word, the usual calling sequence will likely pack the pieces into
3368 a single register. Unfortunately, SUBREG of hard registers only
3369 deals in terms of words, so we have a problem converting input
3370 arguments to the CONCAT of two registers that is used elsewhere
3371 for complex values. If this is before reload, we can copy it into
3372 memory and reload. FIXME, we should see about using extract and
3373 insert on integer registers, but complex short and complex char
3374 variables should be rarely used. */
3375 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
3376 && (reload_in_progress | reload_completed) == 0)
3379 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
3381 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
3383 if (packed_dest_p || packed_src_p)
3385 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
3386 ? MODE_FLOAT : MODE_INT);
3388 enum machine_mode reg_mode
3389 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
3391 if (reg_mode != BLKmode)
3393 rtx mem = assign_stack_temp (reg_mode,
3394 GET_MODE_SIZE (mode), 0);
3395 rtx cmem = adjust_address (mem, mode, 0);
3398 = N_("function using short complex types cannot be inline");
3402 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
3404 emit_move_insn_1 (cmem, y);
3405 return emit_move_insn_1 (sreg, mem);
3409 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
3411 emit_move_insn_1 (mem, sreg);
3412 return emit_move_insn_1 (x, cmem);
3418 realpart_x = gen_realpart (submode, x);
3419 realpart_y = gen_realpart (submode, y);
3420 imagpart_x = gen_imagpart (submode, x);
3421 imagpart_y = gen_imagpart (submode, y);
3423 /* Show the output dies here. This is necessary for SUBREGs
3424 of pseudos since we cannot track their lifetimes correctly;
3425 hard regs shouldn't appear here except as return values.
3426 We never want to emit such a clobber after reload. */
3428 && ! (reload_in_progress || reload_completed)
3429 && (GET_CODE (realpart_x) == SUBREG
3430 || GET_CODE (imagpart_x) == SUBREG))
3431 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3433 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3434 (realpart_x, realpart_y));
3435 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3436 (imagpart_x, imagpart_y));
3439 return get_last_insn ();
3442 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
3443 find a mode to do it in. If we have a movcc, use it. Otherwise,
3444 find the MODE_INT mode of the same width. */
3445 else if (GET_MODE_CLASS (mode) == MODE_CC
3446 && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
3448 enum insn_code insn_code;
3449 enum machine_mode tmode = VOIDmode;
3453 && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
3456 for (tmode = QImode; tmode != VOIDmode;
3457 tmode = GET_MODE_WIDER_MODE (tmode))
3458 if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
3461 if (tmode == VOIDmode)
3464 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
3465 may call change_address which is not appropriate if we were
3466 called when a reload was in progress. We don't have to worry
3467 about changing the address since the size in bytes is supposed to
3468 be the same. Copy the MEM to change the mode and move any
3469 substitutions from the old MEM to the new one. */
3471 if (reload_in_progress)
3473 x = gen_lowpart_common (tmode, x1);
3474 if (x == 0 && GET_CODE (x1) == MEM)
3476 x = adjust_address_nv (x1, tmode, 0);
3477 copy_replacements (x1, x);
3480 y = gen_lowpart_common (tmode, y1);
3481 if (y == 0 && GET_CODE (y1) == MEM)
3483 y = adjust_address_nv (y1, tmode, 0);
3484 copy_replacements (y1, y);
3489 x = gen_lowpart (tmode, x);
3490 y = gen_lowpart (tmode, y);
3493 insn_code = mov_optab->handlers[(int) tmode].insn_code;
3494 return emit_insn (GEN_FCN (insn_code) (x, y));
3497 /* This will handle any multi-word or full-word mode that lacks a move_insn
3498 pattern. However, you will get better code if you define such patterns,
3499 even if they must turn into multiple assembler instructions. */
3500 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
3507 #ifdef PUSH_ROUNDING
3509 /* If X is a push on the stack, do the push now and replace
3510 X with a reference to the stack pointer. */
3511 if (push_operand (x, GET_MODE (x)))
3516 /* Do not use anti_adjust_stack, since we don't want to update
3517 stack_pointer_delta. */
3518 temp = expand_binop (Pmode,
3519 #ifdef STACK_GROWS_DOWNWARD
3527 (GET_MODE_SIZE (GET_MODE (x)))),
3528 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3530 if (temp != stack_pointer_rtx)
3531 emit_move_insn (stack_pointer_rtx, temp);
3533 code = GET_CODE (XEXP (x, 0));
3535 /* Just hope that small offsets off SP are OK. */
3536 if (code == POST_INC)
3537 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3538 GEN_INT (-((HOST_WIDE_INT)
3539 GET_MODE_SIZE (GET_MODE (x)))));
3540 else if (code == POST_DEC)
3541 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3542 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3544 temp = stack_pointer_rtx;
3546 x = change_address (x, VOIDmode, temp);
3550 /* If we are in reload, see if either operand is a MEM whose address
3551 is scheduled for replacement. */
3552 if (reload_in_progress && GET_CODE (x) == MEM
3553 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3554 x = replace_equiv_address_nv (x, inner);
3555 if (reload_in_progress && GET_CODE (y) == MEM
3556 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3557 y = replace_equiv_address_nv (y, inner);
3563 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3566 rtx xpart = operand_subword (x, i, 1, mode);
3567 rtx ypart = operand_subword (y, i, 1, mode);
3569 /* If we can't get a part of Y, put Y into memory if it is a
3570 constant. Otherwise, force it into a register. If we still
3571 can't get a part of Y, abort. */
3572 if (ypart == 0 && CONSTANT_P (y))
3574 y = force_const_mem (mode, y);
3575 ypart = operand_subword (y, i, 1, mode);
3577 else if (ypart == 0)
3578 ypart = operand_subword_force (y, i, mode);
3580 if (xpart == 0 || ypart == 0)
3583 need_clobber |= (GET_CODE (xpart) == SUBREG);
3585 last_insn = emit_move_insn (xpart, ypart);
3591 /* Show the output dies here. This is necessary for SUBREGs
3592 of pseudos since we cannot track their lifetimes correctly;
3593 hard regs shouldn't appear here except as return values.
3594 We never want to emit such a clobber after reload. */
3596 && ! (reload_in_progress || reload_completed)
3597 && need_clobber != 0)
3598 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3608 /* If Y is representable exactly in a narrower mode, and the target can
3609 perform the extension directly from constant or memory, then emit the
3610 move as an extension. */
3613 compress_float_constant (x, y)
3616 enum machine_mode dstmode = GET_MODE (x);
3617 enum machine_mode orig_srcmode = GET_MODE (y);
3618 enum machine_mode srcmode;
3621 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3623 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3624 srcmode != orig_srcmode;
3625 srcmode = GET_MODE_WIDER_MODE (srcmode))
3628 rtx trunc_y, last_insn;
3630 /* Skip if the target can't extend this way. */
3631 ic = can_extend_p (dstmode, srcmode, 0);
3632 if (ic == CODE_FOR_nothing)
3635 /* Skip if the narrowed value isn't exact. */
3636 if (! exact_real_truncate (srcmode, &r))
3639 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3641 if (LEGITIMATE_CONSTANT_P (trunc_y))
3643 /* Skip if the target needs extra instructions to perform the extension. */
3645 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3648 else if (float_extend_from_mem[dstmode][srcmode])
3649 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3653 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3654 last_insn = get_last_insn ();
3656 if (GET_CODE (x) == REG)
3657 REG_NOTES (last_insn)
3658 = gen_rtx_EXPR_LIST (REG_EQUAL, y, REG_NOTES (last_insn));
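/* Worked example (added): if X is DFmode and Y is the CONST_DOUBLE
   1.0, the value truncates exactly to SFmode, so on a target with an
   extendsfdf2 pattern the loop above emits approximately

	(set (reg:DF X) (float_extend:DF (mem/u:SF <pool entry 1.0>)))

   loading a 4-byte constant pool entry instead of an 8-byte one.  */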
3666 /* Pushing data onto the stack. */
3668 /* Push a block of length SIZE (perhaps variable)
3669 and return an rtx to address the beginning of the block.
3670 Note that it is not possible for the value returned to be a QUEUED.
3671 The value may be virtual_outgoing_args_rtx.
3673 EXTRA is the number of bytes of padding to push in addition to SIZE.
3674 BELOW nonzero means this padding comes at low addresses;
3675 otherwise, the padding comes at high addresses. */
3678 push_block (size, extra, below)
3684 size = convert_modes (Pmode, ptr_mode, size, 1);
3685 if (CONSTANT_P (size))
3686 anti_adjust_stack (plus_constant (size, extra));
3687 else if (GET_CODE (size) == REG && extra == 0)
3688 anti_adjust_stack (size);
3691 temp = copy_to_mode_reg (Pmode, size);
3693 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3694 temp, 0, OPTAB_LIB_WIDEN);
3695 anti_adjust_stack (temp);
3698 #ifndef STACK_GROWS_DOWNWARD
3704 temp = virtual_outgoing_args_rtx;
3705 if (extra != 0 && below)
3706 temp = plus_constant (temp, extra);
3710 if (GET_CODE (size) == CONST_INT)
3711 temp = plus_constant (virtual_outgoing_args_rtx,
3712 -INTVAL (size) - (below ? 0 : extra));
3713 else if (extra != 0 && !below)
3714 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3715 negate_rtx (Pmode, plus_constant (size, extra)));
3717 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3718 negate_rtx (Pmode, size));
3721 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
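/* Illustration (added): allocating a 64-byte block of stack space and
   forming a BLKmode reference to copy into:

	rtx addr = push_block (GEN_INT (64), 0, 0);
	rtx blk = gen_rtx_MEM (BLKmode, addr);

   BLK can then be the destination of an emit_block_move.  */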
3724 #ifdef PUSH_ROUNDING
3726 /* Emit single push insn. */
3729 emit_single_push_insn (mode, x, type)
3731 enum machine_mode mode;
3735 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3737 enum insn_code icode;
3738 insn_operand_predicate_fn pred;
3740 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3741 /* If there is a push pattern, use it. Otherwise, fall back to the old
3742 way: hand a MEM representing the push operation to the move expander. */
3743 icode = push_optab->handlers[(int) mode].insn_code;
3744 if (icode != CODE_FOR_nothing)
3746 if (((pred = insn_data[(int) icode].operand[0].predicate)
3747 && !((*pred) (x, mode))))
3748 x = force_reg (mode, x);
3749 emit_insn (GEN_FCN (icode) (x));
3752 if (GET_MODE_SIZE (mode) == rounded_size)
3753 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3756 #ifdef STACK_GROWS_DOWNWARD
3757 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3758 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3760 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3761 GEN_INT (rounded_size));
3763 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3766 dest = gen_rtx_MEM (mode, dest_addr);
3770 set_mem_attributes (dest, type, 1);
3772 if (flag_optimize_sibling_calls)
3773 /* Function incoming arguments may overlap with sibling call
3774 outgoing arguments and we cannot allow reordering of reads
3775 from function arguments with stores to outgoing arguments
3776 of sibling calls. */
3777 set_mem_alias_set (dest, 0);
3779 emit_move_insn (dest, x);
3783 /* Generate code to push X onto the stack, assuming it has mode MODE and type TYPE.
3785 MODE is redundant except when X is a CONST_INT (since they don't carry mode info).
3787 SIZE is an rtx for the size of data to be copied (in bytes),
3788 needed only if X is BLKmode.
3790 ALIGN (in bits) is maximum alignment we can assume.
3792 If PARTIAL and REG are both nonzero, then copy that many of the first
3793 words of X into registers starting with REG, and push the rest of X.
3794 The amount of space pushed is decreased by PARTIAL words,
3795 rounded *down* to a multiple of PARM_BOUNDARY.
3796 REG must be a hard register in this case.
3797 If REG is zero but PARTIAL is not, take all other actions for an
3798 argument partially in registers, but do not actually load any registers.
3801 EXTRA is the amount in bytes of extra space to leave next to this arg.
3802 This is ignored if an argument block has already been allocated.
3804 On a machine that lacks real push insns, ARGS_ADDR is the address of
3805 the bottom of the argument block for this call. We use indexing off there
3806 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3807 argument block has not been preallocated.
3809 ARGS_SO_FAR is the size of args previously pushed for this call.
3811 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3812 for arguments passed in registers. If nonzero, it will be the number
3813 of bytes required. */
3816 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3817 args_addr, args_so_far, reg_parm_stack_space,
3820 enum machine_mode mode;
3829 int reg_parm_stack_space;
3833 enum direction stack_direction
3834 #ifdef STACK_GROWS_DOWNWARD
3840 /* Decide where to pad the argument: `downward' for below,
3841 `upward' for above, or `none' for don't pad it.
3842 Default is below for small data on big-endian machines; else above. */
3843 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3845 /* Invert direction if stack is post-decrement. */
3847 if (STACK_PUSH_CODE == POST_DEC)
3848 if (where_pad != none)
3849 where_pad = (where_pad == downward ? upward : downward);
3851 xinner = x = protect_from_queue (x, 0);
3853 if (mode == BLKmode)
3855 /* Copy a block into the stack, entirely or partially. */
3858 int used = partial * UNITS_PER_WORD;
3859 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3867 /* USED is now the # of bytes we need not copy to the stack
3868 because registers will take care of them. */
3871 xinner = adjust_address (xinner, BLKmode, used);
3873 /* If the partial register-part of the arg counts in its stack size,
3874 skip the part of stack space corresponding to the registers.
3875 Otherwise, start copying to the beginning of the stack space,
3876 by setting SKIP to 0. */
3877 skip = (reg_parm_stack_space == 0) ? 0 : used;
3879 #ifdef PUSH_ROUNDING
3880 /* Do it with several push insns if that doesn't take lots of insns
3881 and if there is no difficulty with push insns that skip bytes
3882 on the stack for alignment purposes. */
3885 && GET_CODE (size) == CONST_INT
3887 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3888 /* Here we avoid the case of a structure whose weak alignment
3889 forces many pushes of a small amount of data,
3890 and such small pushes do rounding that causes trouble. */
3891 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3892 || align >= BIGGEST_ALIGNMENT
3893 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3894 == (align / BITS_PER_UNIT)))
3895 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3897 /* Push padding now if padding above and stack grows down,
3898 or if padding below and stack grows up.
3899 But if space already allocated, this has already been done. */
3900 if (extra && args_addr == 0
3901 && where_pad != none && where_pad != stack_direction)
3902 anti_adjust_stack (GEN_INT (extra));
3904 move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
3907 #endif /* PUSH_ROUNDING */
3911 /* Otherwise make space on the stack and copy the data
3912 to the address of that space. */
3914 /* Deduct words put into registers from the size we must copy. */
3917 if (GET_CODE (size) == CONST_INT)
3918 size = GEN_INT (INTVAL (size) - used);
3920 size = expand_binop (GET_MODE (size), sub_optab, size,
3921 GEN_INT (used), NULL_RTX, 0,
3925 /* Get the address of the stack space.
3926 In this case, we do not deal with EXTRA separately.
3927 A single stack adjust will do. */
3930 temp = push_block (size, extra, where_pad == downward);
3933 else if (GET_CODE (args_so_far) == CONST_INT)
3934 temp = memory_address (BLKmode,
3935 plus_constant (args_addr,
3936 skip + INTVAL (args_so_far)));
3938 temp = memory_address (BLKmode,
3939 plus_constant (gen_rtx_PLUS (Pmode,
3944 if (!ACCUMULATE_OUTGOING_ARGS)
3946 /* If the source is referenced relative to the stack pointer,
3947 copy it to another register to stabilize it. We do not need
3948 to do this if we know that we won't be changing sp. */
3950 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3951 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3952 temp = copy_to_reg (temp);
3955 target = gen_rtx_MEM (BLKmode, temp);
3959 set_mem_attributes (target, type, 1);
3960 /* Function incoming arguments may overlap with sibling call
3961 outgoing arguments and we cannot allow reordering of reads
3962 from function arguments with stores to outgoing arguments
3963 of sibling calls. */
3964 set_mem_alias_set (target, 0);
3967 /* ALIGN may well be better aligned than TYPE, e.g. due to
3968 PARM_BOUNDARY. Assume the caller isn't lying. */
3969 set_mem_align (target, align);
3971 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3974 else if (partial > 0)
3976 /* Scalar partly in registers. */
3978 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3981 /* # words of start of argument
3982 that we must make space for but need not store. */
3983 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3984 int args_offset = INTVAL (args_so_far);
3987 /* Push padding now if padding above and stack grows down,
3988 or if padding below and stack grows up.
3989 But if space already allocated, this has already been done. */
3990 if (extra && args_addr == 0
3991 && where_pad != none && where_pad != stack_direction)
3992 anti_adjust_stack (GEN_INT (extra));
3994 /* If we make space by pushing it, we might as well push
3995 the real data. Otherwise, we can leave OFFSET nonzero
3996 and leave the space uninitialized. */
4000 /* Now NOT_STACK gets the number of words that we don't need to
4001 allocate on the stack. */
4002 not_stack = partial - offset;
4004 /* If the partial register-part of the arg counts in its stack size,
4005 skip the part of stack space corresponding to the registers.
4006 Otherwise, start copying to the beginning of the stack space,
4007 by setting SKIP to 0. */
4008 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
4010 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
4011 x = validize_mem (force_const_mem (mode, x));
4013 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
4014 SUBREGs of such registers are not allowed. */
4015 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
4016 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
4017 x = copy_to_reg (x);
4019 /* Loop over all the words allocated on the stack for this arg. */
4020 /* We can do it by words, because any scalar bigger than a word
4021 has a size a multiple of a word. */
4022 #ifndef PUSH_ARGS_REVERSED
4023 for (i = not_stack; i < size; i++)
4025 for (i = size - 1; i >= not_stack; i--)
4027 if (i >= not_stack + offset)
4028 emit_push_insn (operand_subword_force (x, i, mode),
4029 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
4031 GEN_INT (args_offset + ((i - not_stack + skip)
4033 reg_parm_stack_space, alignment_pad);
4040 /* Push padding now if padding above and stack grows down,
4041 or if padding below and stack grows up.
4042 But if space already allocated, this has already been done. */
4043 if (extra && args_addr == 0
4044 && where_pad != none && where_pad != stack_direction)
4045 anti_adjust_stack (GEN_INT (extra));
4047 #ifdef PUSH_ROUNDING
4048 if (args_addr == 0 && PUSH_ARGS)
4049 emit_single_push_insn (mode, x, type);
4053 if (GET_CODE (args_so_far) == CONST_INT)
4055 = memory_address (mode,
4056 plus_constant (args_addr,
4057 INTVAL (args_so_far)));
4059 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
4061 dest = gen_rtx_MEM (mode, addr);
4064 set_mem_attributes (dest, type, 1);
4065 /* Function incoming arguments may overlap with sibling call
4066 outgoing arguments and we cannot allow reordering of reads
4067 from function arguments with stores to outgoing arguments
4068 of sibling calls. */
4069 set_mem_alias_set (dest, 0);
4072 emit_move_insn (dest, x);
4076 /* If part should go in registers, copy that part
4077 into the appropriate registers. Do this now, at the end,
4078 since mem-to-mem copies above may do function calls. */
4079 if (partial > 0 && reg != 0)
4081 /* Handle calls that pass values in multiple non-contiguous locations.
4082 The Irix 6 ABI has examples of this. */
4083 if (GET_CODE (reg) == PARALLEL)
4084 emit_group_load (reg, x, -1); /* ??? size? */
4086 move_block_to_reg (REGNO (reg), x, partial, mode);
4089 if (extra && args_addr == 0 && where_pad == stack_direction)
4090 anti_adjust_stack (GEN_INT (extra));
4092 if (alignment_pad && args_addr == 0)
4093 anti_adjust_stack (alignment_pad);
4096 /* Return X if X can be used as a subtarget in a sequence of arithmetic operations. */
4104 /* Only registers can be subtargets. */
4105 || GET_CODE (x) != REG
4106 /* If the register is readonly, it can't be set more than once. */
4107 || RTX_UNCHANGING_P (x)
4108 /* Don't use hard regs to avoid extending their life. */
4109 || REGNO (x) < FIRST_PSEUDO_REGISTER
4110 /* Avoid subtargets inside loops,
4111 since they hide some invariant expressions. */
4112 || preserve_subexpressions_p ())
4116 /* Expand an assignment that stores the value of FROM into TO.
4117 If WANT_VALUE is nonzero, return an rtx for the value of TO.
4118 (This may contain a QUEUED rtx;
4119 if the value is constant, this rtx is a constant.)
4120 Otherwise, the returned value is NULL_RTX.
4122 SUGGEST_REG is no longer actually used.
4123 It used to mean, copy the value through a register
4124 and return that register, if that is possible.
4125 We now use WANT_VALUE to decide whether to do this. */
4128 expand_assignment (to, from, want_value, suggest_reg)
4131 int suggest_reg ATTRIBUTE_UNUSED;
4136 /* Don't crash if the lhs of the assignment was erroneous. */
4138 if (TREE_CODE (to) == ERROR_MARK)
4140 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
4141 return want_value ? result : NULL_RTX;
4144 /* Assignment of a structure component needs special treatment
4145 if the structure component's rtx is not simply a MEM.
4146 Assignment of an array element at a constant index, and assignment of
4147 an array element in an unaligned packed structure field, has the same problem. */
4150 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
4151 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
4152 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4154 enum machine_mode mode1;
4155 HOST_WIDE_INT bitsize, bitpos;
4163 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4164 &unsignedp, &volatilep);
4166 /* If we are going to use store_bit_field and extract_bit_field,
4167 make sure to_rtx will be safe for multiple use. */
4169 if (mode1 == VOIDmode && want_value)
4170 tem = stabilize_reference (tem);
4172 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
4176 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4178 if (GET_CODE (to_rtx) != MEM)
4181 #ifdef POINTERS_EXTEND_UNSIGNED
4182 if (GET_MODE (offset_rtx) != Pmode)
4183 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4185 if (GET_MODE (offset_rtx) != ptr_mode)
4186 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4189 /* A constant address in TO_RTX can have VOIDmode, we must not try
4190 to call force_reg for that case. Avoid that case. */
4191 if (GET_CODE (to_rtx) == MEM
4192 && GET_MODE (to_rtx) == BLKmode
4193 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4195 && (bitpos % bitsize) == 0
4196 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4197 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4199 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4203 to_rtx = offset_address (to_rtx, offset_rtx,
4204 highest_pow2_factor_for_type (TREE_TYPE (to),
4208 if (GET_CODE (to_rtx) == MEM)
4210 /* If the field is at offset zero, we could have been given the
4211 DECL_RTX of the parent struct. Don't munge it. */
4212 to_rtx = shallow_copy_rtx (to_rtx);
4214 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4217 /* Deal with volatile and readonly fields. The former is only done
4218 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4219 if (volatilep && GET_CODE (to_rtx) == MEM)
4221 if (to_rtx == orig_to_rtx)
4222 to_rtx = copy_rtx (to_rtx);
4223 MEM_VOLATILE_P (to_rtx) = 1;
4226 if (TREE_CODE (to) == COMPONENT_REF
4227 && TREE_READONLY (TREE_OPERAND (to, 1)))
4229 if (to_rtx == orig_to_rtx)
4230 to_rtx = copy_rtx (to_rtx);
4231 RTX_UNCHANGING_P (to_rtx) = 1;
4234 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
4236 if (to_rtx == orig_to_rtx)
4237 to_rtx = copy_rtx (to_rtx);
4238 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4241 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4243 /* Spurious cast for HPUX compiler. */
4244 ? ((enum machine_mode)
4245 TYPE_MODE (TREE_TYPE (to)))
4247 unsignedp, TREE_TYPE (tem), get_alias_set (to));
4249 preserve_temp_slots (result);
4253 /* If the value is meaningful, convert RESULT to the proper mode.
4254 Otherwise, return nothing. */
4255 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
4256 TYPE_MODE (TREE_TYPE (from)),
4258 TREE_UNSIGNED (TREE_TYPE (to)))
4262 /* If the rhs is a function call and its value is not an aggregate,
4263 call the function before we start to compute the lhs.
4264 This is needed for correct code for cases such as
4265 val = setjmp (buf) on machines where reference to val
4266 requires loading up part of an address in a separate insn.
4268 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4269 since it might be a promoted variable where the zero- or sign- extension
4270 needs to be done. Handling this in the normal way is safe because no
4271 computation is done before the call. */
4272 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
4273 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4274 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4275 && GET_CODE (DECL_RTL (to)) == REG))
4280 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
4282 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4284 /* Handle calls that return values in multiple non-contiguous locations.
4285 The Irix 6 ABI has examples of this. */
4286 if (GET_CODE (to_rtx) == PARALLEL)
4287 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)));
4288 else if (GET_MODE (to_rtx) == BLKmode)
4289 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4292 #ifdef POINTERS_EXTEND_UNSIGNED
4293 if (POINTER_TYPE_P (TREE_TYPE (to))
4294 && GET_MODE (to_rtx) != GET_MODE (value))
4295 value = convert_memory_address (GET_MODE (to_rtx), value);
4297 emit_move_insn (to_rtx, value);
4299 preserve_temp_slots (to_rtx);
4302 return want_value ? to_rtx : NULL_RTX;
4305 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4306 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4309 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4311 /* Don't move directly into a return register. */
4312 if (TREE_CODE (to) == RESULT_DECL
4313 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
4318 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
4320 if (GET_CODE (to_rtx) == PARALLEL)
4321 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)));
4323 emit_move_insn (to_rtx, temp);
4325 preserve_temp_slots (to_rtx);
4328 return want_value ? to_rtx : NULL_RTX;
4331 /* In case we are returning the contents of an object which overlaps
4332 the place the value is being stored, use a safe function when copying
4333 a value through a pointer into a structure value return block. */
4334 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4335 && current_function_returns_struct
4336 && !current_function_returns_pcc_struct)
4341 size = expr_size (from);
4342 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
4344 if (TARGET_MEM_FUNCTIONS)
4345 emit_library_call (memmove_libfunc, LCT_NORMAL,
4346 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4347 XEXP (from_rtx, 0), Pmode,
4348 convert_to_mode (TYPE_MODE (sizetype),
4349 size, TREE_UNSIGNED (sizetype)),
4350 TYPE_MODE (sizetype));
4352 emit_library_call (bcopy_libfunc, LCT_NORMAL,
4353 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
4354 XEXP (to_rtx, 0), Pmode,
4355 convert_to_mode (TYPE_MODE (integer_type_node),
4357 TREE_UNSIGNED (integer_type_node)),
4358 TYPE_MODE (integer_type_node));
4360 preserve_temp_slots (to_rtx);
4363 return want_value ? to_rtx : NULL_RTX;
4366 /* Compute FROM and store the value in the rtx we got. */
4369 result = store_expr (from, to_rtx, want_value);
4370 preserve_temp_slots (result);
4373 return want_value ? result : NULL_RTX;
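/* Illustration (added; LHS_TREE and RHS_TREE are hypothetical trees):
   a front end expanding the statement `x = y;' discards the value and
   so calls

	expand_assignment (lhs_tree, rhs_tree, /*want_value=*/0, 0);

   passing WANT_VALUE == 1 only when the assignment itself is used as
   a subexpression.  */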
4376 /* Generate code for computing expression EXP,
4377 and storing the value into TARGET.
4378 TARGET may contain a QUEUED rtx.
4380 If WANT_VALUE & 1 is nonzero, return a copy of the value
4381 not in TARGET, so that we can be sure to use the proper
4382 value in a containing expression even if TARGET has something
4383 else stored in it. If possible, we copy the value through a pseudo
4384 and return that pseudo. Or, if the value is constant, we try to
4385 return the constant. In some cases, we return a pseudo
4386 copied *from* TARGET.
4388 If the mode is BLKmode then we may return TARGET itself.
4389 It turns out that in BLKmode it doesn't cause a problem,
4390 because C has no operators that could combine two different
4391 assignments into the same BLKmode object with different values
4392 with no sequence point. Will other languages need this to
4393 be more thorough?
4395 If WANT_VALUE & 1 is 0, we return NULL, to make sure
4396 to catch quickly any cases where the caller uses the value
4397 and fails to set WANT_VALUE.
4399 If WANT_VALUE & 2 is set, this is a store into a call param on the
4400 stack, and block moves may need to be treated specially. */
4403 store_expr (exp, target, want_value)
4409 int dont_return_target = 0;
4410 int dont_store_target = 0;
4412 if (VOID_TYPE_P (TREE_TYPE (exp)))
4414 /* C++ can generate ?: expressions with a throw expression in one
4415 branch and an rvalue in the other. Here, we resolve attempts to
4416 store the throw expression's nonexistent result. */
4419 expand_expr (exp, const0_rtx, VOIDmode, 0);
4422 if (TREE_CODE (exp) == COMPOUND_EXPR)
4424 /* Perform first part of compound expression, then assign from second
4426 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4427 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4429 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4431 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4433 /* For conditional expression, get safe form of the target. Then
4434 test the condition, doing the appropriate assignment on either
4435 side. This avoids the creation of unnecessary temporaries.
4436 For non-BLKmode, it is more efficient not to do this. */
4438 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4441 target = protect_from_queue (target, 1);
4443 do_pending_stack_adjust ();
4445 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4446 start_cleanup_deferral ();
4447 store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
4448 end_cleanup_deferral ();
4450 emit_jump_insn (gen_jump (lab2));
4453 start_cleanup_deferral ();
4454 store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
4455 end_cleanup_deferral ();
4460 return want_value & 1 ? target : NULL_RTX;
4462 else if (queued_subexp_p (target))
4463 /* If target contains a postincrement, let's not risk
4464 using it as the place to generate the rhs. */
4466 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4468 /* Expand EXP into a new pseudo. */
4469 temp = gen_reg_rtx (GET_MODE (target));
4470 temp = expand_expr (exp, temp, GET_MODE (target),
4472 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4475 temp = expand_expr (exp, NULL_RTX, GET_MODE (target),
4477 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4479 /* If target is volatile, ANSI requires accessing the value
4480 *from* the target, if it is accessed. So make that happen.
4481 In no case return the target itself. */
4482 if (! MEM_VOLATILE_P (target) && (want_value & 1) != 0)
4483 dont_return_target = 1;
4485 else if ((want_value & 1) != 0
4486 && GET_CODE (target) == MEM
4487 && ! MEM_VOLATILE_P (target)
4488 && GET_MODE (target) != BLKmode)
4489 /* If target is in memory and caller wants value in a register instead,
4490 arrange that. Pass TARGET as target for expand_expr so that,
4491 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4492 We know expand_expr will not use the target in that case.
4493 Don't do this if TARGET is volatile because we are supposed
4494 to write it and then read it. */
4496 temp = expand_expr (exp, target, GET_MODE (target),
4497 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4498 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4500 /* If TEMP is already in the desired TARGET, only copy it from
4501 memory and don't store it there again. */
4502 if (temp == target
4503 || (rtx_equal_p (temp, target)
4504 && ! side_effects_p (temp) && ! side_effects_p (target)))
4505 dont_store_target = 1;
4506 temp = copy_to_reg (temp);
4508 dont_return_target = 1;
4510 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4511 /* If this is a scalar in a register that is stored in a wider mode
4512 than the declared mode, compute the result into its declared mode
4513 and then convert to the wider mode. Our value is the computed
4514 expression. */
4516 rtx inner_target = 0;
4518 /* If we don't want a value, we can do the conversion inside EXP,
4519 which will often result in some optimizations. Do the conversion
4520 in two steps: first change the signedness, if needed, then
4521 the extension. But don't do this if the type of EXP is a subtype
4522 of something else since then the conversion might involve
4523 more than just converting modes. */
4524 if ((want_value & 1) == 0
4525 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4526 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4528 if (TREE_UNSIGNED (TREE_TYPE (exp))
4529 != SUBREG_PROMOTED_UNSIGNED_P (target))
4530 exp = convert
4531 ((*lang_hooks.types.signed_or_unsigned_type)
4532 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4534 exp = convert ((*lang_hooks.types.type_for_mode)
4535 (GET_MODE (SUBREG_REG (target)),
4536 SUBREG_PROMOTED_UNSIGNED_P (target)),
4539 inner_target = SUBREG_REG (target);
4542 temp = expand_expr (exp, inner_target, VOIDmode,
4543 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4545 /* If TEMP is a MEM and we want a result value, make the access
4546 now so it gets done only once. Strictly speaking, this is
4547 only necessary if the MEM is volatile, or if the address
4548 overlaps TARGET. But not performing the load twice also
4549 reduces the amount of rtl we generate and then have to CSE. */
4550 if (GET_CODE (temp) == MEM && (want_value & 1) != 0)
4551 temp = copy_to_reg (temp);
4553 /* If TEMP is a VOIDmode constant, use convert_modes to make
4554 sure that we properly convert it. */
4555 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4557 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4558 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4559 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4560 GET_MODE (target), temp,
4561 SUBREG_PROMOTED_UNSIGNED_P (target));
4564 convert_move (SUBREG_REG (target), temp,
4565 SUBREG_PROMOTED_UNSIGNED_P (target));
4567 /* If we promoted a constant, change the mode back down to match
4568 target. Otherwise, the caller might get confused by a result whose
4569 mode is larger than expected. */
4571 if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
4573 if (GET_MODE (temp) != VOIDmode)
4575 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4576 SUBREG_PROMOTED_VAR_P (temp) = 1;
4577 SUBREG_PROMOTED_UNSIGNED_SET (temp,
4578 SUBREG_PROMOTED_UNSIGNED_P (target));
4581 temp = convert_modes (GET_MODE (target),
4582 GET_MODE (SUBREG_REG (target)),
4583 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4586 return want_value & 1 ? temp : NULL_RTX;
4590 temp = expand_expr (exp, target, GET_MODE (target),
4591 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4592 /* Return TARGET if it's a specified hardware register.
4593 If TARGET is a volatile mem ref, either return TARGET
4594 or return a reg copied *from* TARGET; ANSI requires this.
4596 Otherwise, if TEMP is not TARGET, return TEMP
4597 if it is constant (for efficiency),
4598 or if we really want the correct value. */
4599 if (!(target && GET_CODE (target) == REG
4600 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4601 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4602 && ! rtx_equal_p (temp, target)
4603 && (CONSTANT_P (temp) || (want_value & 1) != 0))
4604 dont_return_target = 1;
4607 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4608 the same as that of TARGET, adjust the constant. This is needed, for
4609 example, in case it is a CONST_DOUBLE and we want only a word-sized
4610 value. */
4611 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4612 && TREE_CODE (exp) != ERROR_MARK
4613 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4614 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4615 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4617 /* If value was not generated in the target, store it there.
4618 Convert the value to TARGET's type first if necessary.
4619 If TEMP and TARGET compare equal according to rtx_equal_p, but
4620 one or both of them are volatile memory refs, we have to distinguish
4622 - expand_expr has used TARGET. In this case, we must not generate
4623 another copy. This can be detected by TARGET being equal according
4624 to == .
4625 - expand_expr has not used TARGET - that means that the source just
4626 happens to have the same RTX form. Since temp will have been created
4627 by expand_expr, it will compare unequal according to == .
4628 We must generate a copy in this case, to reach the correct number
4629 of volatile memory references. */
4631 if ((! rtx_equal_p (temp, target)
4632 || (temp != target && (side_effects_p (temp)
4633 || side_effects_p (target))))
4634 && TREE_CODE (exp) != ERROR_MARK
4635 && ! dont_store_target
4636 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4637 but TARGET is not a valid memory reference, TEMP will differ
4638 from TARGET although it is really the same location. */
4639 && (TREE_CODE_CLASS (TREE_CODE (exp)) != 'd'
4640 || target != DECL_RTL_IF_SET (exp))
4641 /* If there's nothing to copy, don't bother. Don't call expr_size
4642 unless necessary, because some front ends' (e.g. C++) expr_size
4643 hooks abort on objects that are not supposed to be bit-copied or
4644 bit-initialized. */
4645 && expr_size (exp) != const0_rtx)
4647 target = protect_from_queue (target, 1);
4648 if (GET_MODE (temp) != GET_MODE (target)
4649 && GET_MODE (temp) != VOIDmode)
4651 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4652 if (dont_return_target)
4654 /* In this case, we will return TEMP,
4655 so make sure it has the proper mode.
4656 But don't forget to store the value into TARGET. */
4657 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4658 emit_move_insn (target, temp);
4661 convert_move (target, temp, unsignedp);
4664 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4666 /* Handle copying a string constant into an array. The string
4667 constant may be shorter than the array. So copy just the string's
4668 actual length, and clear the rest. First get the size of the data
4669 type of the string, which is actually the size of the target. */
4670 rtx size = expr_size (exp);
4672 if (GET_CODE (size) == CONST_INT
4673 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4674 emit_block_move (target, temp, size,
4676 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4679 /* Compute the size of the data to copy from the string. */
4680 tree copy_size
4681 = size_binop (MIN_EXPR,
4682 make_tree (sizetype, size),
4683 size_int (TREE_STRING_LENGTH (exp)));
4684 rtx copy_size_rtx
4685 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4687 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4690 /* Copy that much. */
4691 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4692 TREE_UNSIGNED (sizetype));
4693 emit_block_move (target, temp, copy_size_rtx,
4695 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4697 /* Figure out how much is left in TARGET that we have to clear.
4698 Do all calculations in ptr_mode. */
4699 if (GET_CODE (copy_size_rtx) == CONST_INT)
4701 size = plus_constant (size, -INTVAL (copy_size_rtx));
4702 target = adjust_address (target, BLKmode,
4703 INTVAL (copy_size_rtx));
4707 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4708 copy_size_rtx, NULL_RTX, 0,
4711 #ifdef POINTERS_EXTEND_UNSIGNED
4712 if (GET_MODE (copy_size_rtx) != Pmode)
4713 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4714 TREE_UNSIGNED (sizetype));
4717 target = offset_address (target, copy_size_rtx,
4718 highest_pow2_factor (copy_size));
4719 label = gen_label_rtx ();
4720 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4721 GET_MODE (size), 0, label);
4724 if (size != const0_rtx)
4725 clear_storage (target, size);
4731 /* Handle calls that return values in multiple non-contiguous locations.
4732 The Irix 6 ABI has examples of this. */
4733 else if (GET_CODE (target) == PARALLEL)
4734 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)));
4735 else if (GET_MODE (temp) == BLKmode)
4736 emit_block_move (target, temp, expr_size (exp),
4738 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4740 emit_move_insn (target, temp);
4743 /* If we don't want a value, return NULL_RTX. */
4744 if ((want_value & 1) == 0)
4747 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4748 ??? The latter test doesn't seem to make sense. */
4749 else if (dont_return_target && GET_CODE (temp) != MEM)
4752 /* Return TARGET itself if it is a hard register. */
4753 else if ((want_value & 1) != 0
4754 && GET_MODE (target) != BLKmode
4755 && ! (GET_CODE (target) == REG
4756 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4757 return copy_to_reg (target);
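/* Illustrative sketch, not part of this file: WANT_VALUE is a two-bit
   flag set, not a boolean.  Bit 0 means the caller wants the stored
   value back; bit 1 means the store targets an outgoing call argument
   on the stack.  The names below are hypothetical stand-ins; only
   EXPAND_STACK_PARM and EXPAND_NORMAL are the real enumerators used
   above.  */
#define SKETCH_WANT_VALUE  1	/* tested above as want_value & 1 */
#define SKETCH_CALL_PARAM  2	/* tested above as want_value & 2 */

static int
sketch_pick_modifier (int want_value)
{
  /* Mirrors the recurring `want_value & 2 ? EXPAND_STACK_PARM
     : EXPAND_NORMAL' tests; 1 and 0 stand in for those enumerators.  */
  return (want_value & SKETCH_CALL_PARAM) ? 1 : 0;
}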
4763 /* Return 1 if EXP just contains zeros. */
4771 switch (TREE_CODE (exp))
4775 case NON_LVALUE_EXPR:
4776 case VIEW_CONVERT_EXPR:
4777 return is_zeros_p (TREE_OPERAND (exp, 0));
4780 return integer_zerop (exp);
4783 return
4784 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4787 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4790 for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4791 elt = TREE_CHAIN (elt))
4792 if (!is_zeros_p (TREE_VALUE (elt)))
4798 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4799 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4800 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4801 if (! is_zeros_p (TREE_VALUE (elt)))
4811 /* Return 1 if EXP contains mostly (at least 3/4) zeros. */
4814 mostly_zeros_p (exp)
4817 if (TREE_CODE (exp) == CONSTRUCTOR)
4819 int elts = 0, zeros = 0;
4820 tree elt = CONSTRUCTOR_ELTS (exp);
4821 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4823 /* If there are no ranges of true bits, it is all zero. */
4824 return elt == NULL_TREE;
4826 for (; elt; elt = TREE_CHAIN (elt))
4828 /* We do not handle the case where the index is a RANGE_EXPR,
4829 so the statistic will be somewhat inaccurate.
4830 We do make a more accurate count in store_constructor itself,
4831 and since this function is only used for nested array elements,
4832 this should be close enough. */
4833 if (mostly_zeros_p (TREE_VALUE (elt)))
4838 return 4 * zeros >= 3 * elts;
4841 return is_zeros_p (exp);
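/* Illustrative sketch, not part of this file: the `4 * zeros >= 3 * elts'
   test above is an integer rewrite of zeros/elts >= 3/4 that needs no
   division, and it is trivially true for an empty constructor
   (0 >= 0), which is indeed all zeros.  */
static int
sketch_mostly_zeros (const int *vals, int n)
{
  int i, zeros = 0;

  for (i = 0; i < n; i++)
    if (vals[i] == 0)
      zeros++;

  return 4 * zeros >= 3 * n;
}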
4844 /* Helper function for store_constructor.
4845 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4846 TYPE is the type of the CONSTRUCTOR, not the element type.
4847 CLEARED is as for store_constructor.
4848 ALIAS_SET is the alias set to use for any stores.
4850 This provides a recursive shortcut back to store_constructor when it isn't
4851 necessary to go through store_field. This is so that we can pass through
4852 the cleared field to let store_constructor know that we may not have to
4853 clear a substructure if the outer structure has already been cleared. */
4856 store_constructor_field (target, bitsize, bitpos, mode, exp, type, cleared,
4859 unsigned HOST_WIDE_INT bitsize;
4860 HOST_WIDE_INT bitpos;
4861 enum machine_mode mode;
4866 if (TREE_CODE (exp) == CONSTRUCTOR
4867 && bitpos % BITS_PER_UNIT == 0
4868 /* If we have a nonzero bitpos for a register target, then we just
4869 let store_field do the bitfield handling. This is unlikely to
4870 generate unnecessary clear instructions anyway. */
4871 && (bitpos == 0 || GET_CODE (target) == MEM))
4873 if (GET_CODE (target) == MEM)
4874 target
4875 = adjust_address (target,
4876 GET_MODE (target) == BLKmode
4878 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4879 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4882 /* Update the alias set, if required. */
4883 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4884 && MEM_ALIAS_SET (target) != 0)
4886 target = copy_rtx (target);
4887 set_mem_alias_set (target, alias_set);
4890 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4893 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
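/* Illustrative sketch, not part of this file: the CONSTRUCTOR shortcut
   above applies only when BITPOS falls on a byte boundary, because a
   MEM can then be re-addressed with a plain byte offset; any residual
   bit offset forces the store_field path instead.  */
static void
sketch_split_bitpos (long bitpos, long *byte_off, int *residual_bits)
{
  const int bits_per_unit = 8;	/* BITS_PER_UNIT on byte-addressed targets */

  *byte_off = bitpos / bits_per_unit;		   /* fed to adjust_address */
  *residual_bits = (int) (bitpos % bits_per_unit); /* nonzero => store_field */
}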
4897 /* Store the value of constructor EXP into the rtx TARGET.
4898 TARGET is either a REG or a MEM; we know it cannot conflict, since
4899 safe_from_p has been called.
4900 CLEARED is true if TARGET is known to have been zero'd.
4901 SIZE is the number of bytes of TARGET we are allowed to modify: this
4902 may not be the same as the size of EXP if we are assigning to a field
4903 which has been packed to exclude padding bits. */
4906 store_constructor (exp, target, cleared, size)
4912 tree type = TREE_TYPE (exp);
4913 #ifdef WORD_REGISTER_OPERATIONS
4914 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4917 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4918 || TREE_CODE (type) == QUAL_UNION_TYPE)
4922 /* We either clear the aggregate or indicate the value is dead. */
4923 if ((TREE_CODE (type) == UNION_TYPE
4924 || TREE_CODE (type) == QUAL_UNION_TYPE)
4926 && ! CONSTRUCTOR_ELTS (exp))
4927 /* If the constructor is empty, clear the union. */
4929 clear_storage (target, expr_size (exp));
4933 /* If we are building a static constructor into a register,
4934 set the initial value as zero so we can fold the value into
4935 a constant. But if more than one register is involved,
4936 this probably loses. */
4937 else if (! cleared && GET_CODE (target) == REG && TREE_STATIC (exp)
4938 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4940 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4944 /* If the constructor has fewer fields than the structure
4945 or if we are initializing the structure to mostly zeros,
4946 clear the whole structure first. Don't do this if TARGET is a
4947 register whose mode size isn't equal to SIZE since clear_storage
4948 can't handle this case. */
4949 else if (! cleared && size > 0
4950 && ((list_length (CONSTRUCTOR_ELTS (exp))
4951 != fields_length (type))
4952 || mostly_zeros_p (exp))
4953 && (GET_CODE (target) != REG
4954 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4957 rtx xtarget = target;
4959 if (readonly_fields_p (type))
4961 xtarget = copy_rtx (xtarget);
4962 RTX_UNCHANGING_P (xtarget) = 1;
4965 clear_storage (xtarget, GEN_INT (size));
4970 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4972 /* Store each element of the constructor into
4973 the corresponding field of TARGET. */
4975 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4977 tree field = TREE_PURPOSE (elt);
4978 tree value = TREE_VALUE (elt);
4979 enum machine_mode mode;
4980 HOST_WIDE_INT bitsize;
4981 HOST_WIDE_INT bitpos = 0;
4983 rtx to_rtx = target;
4985 /* Just ignore missing fields.
4986 We cleared the whole structure, above,
4987 if any fields are missing. */
4991 if (cleared && is_zeros_p (value))
4994 if (host_integerp (DECL_SIZE (field), 1))
4995 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4999 mode = DECL_MODE (field);
5000 if (DECL_BIT_FIELD (field))
5003 offset = DECL_FIELD_OFFSET (field);
5004 if (host_integerp (offset, 0)
5005 && host_integerp (bit_position (field), 0))
5007 bitpos = int_bit_position (field);
5011 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
5017 if (contains_placeholder_p (offset))
5018 offset = build (WITH_RECORD_EXPR, sizetype,
5019 offset, make_tree (TREE_TYPE (exp), target));
5021 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
5022 if (GET_CODE (to_rtx) != MEM)
5025 #ifdef POINTERS_EXTEND_UNSIGNED
5026 if (GET_MODE (offset_rtx) != Pmode)
5027 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
5029 if (GET_MODE (offset_rtx) != ptr_mode)
5030 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
5033 to_rtx = offset_address (to_rtx, offset_rtx,
5034 highest_pow2_factor (offset));
5037 if (TREE_READONLY (field))
5039 if (GET_CODE (to_rtx) == MEM)
5040 to_rtx = copy_rtx (to_rtx);
5042 RTX_UNCHANGING_P (to_rtx) = 1;
5045 #ifdef WORD_REGISTER_OPERATIONS
5046 /* If this initializes a field that is smaller than a word, at the
5047 start of a word, try to widen it to a full word.
5048 This special case allows us to output C++ member function
5049 initializations in a form that the optimizers can understand. */
5050 if (GET_CODE (target) == REG
5051 && bitsize < BITS_PER_WORD
5052 && bitpos % BITS_PER_WORD == 0
5053 && GET_MODE_CLASS (mode) == MODE_INT
5054 && TREE_CODE (value) == INTEGER_CST
5056 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5058 tree type = TREE_TYPE (value);
5060 if (TYPE_PRECISION (type) < BITS_PER_WORD)
5062 type = (*lang_hooks.types.type_for_size)
5063 (BITS_PER_WORD, TREE_UNSIGNED (type));
5064 value = convert (type, value);
5067 if (BYTES_BIG_ENDIAN)
5068 value
5069 = fold (build (LSHIFT_EXPR, type, value,
5070 build_int_2 (BITS_PER_WORD - bitsize, 0)));
5071 bitsize = BITS_PER_WORD;
5076 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5077 && DECL_NONADDRESSABLE_P (field))
5079 to_rtx = copy_rtx (to_rtx);
5080 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5083 store_constructor_field (to_rtx, bitsize, bitpos, mode,
5084 value, type, cleared,
5085 get_alias_set (TREE_TYPE (field)));
5088 else if (TREE_CODE (type) == ARRAY_TYPE
5089 || TREE_CODE (type) == VECTOR_TYPE)
5094 tree domain = TYPE_DOMAIN (type);
5095 tree elttype = TREE_TYPE (type);
5097 HOST_WIDE_INT minelt = 0;
5098 HOST_WIDE_INT maxelt = 0;
5100 /* Vectors are like arrays, but the domain is stored via an array
5101 type indirectly. */
5102 if (TREE_CODE (type) == VECTOR_TYPE)
5104 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
5105 the same field as TYPE_DOMAIN, we are not guaranteed that
5106 it always will. */
5107 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
5108 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
5111 const_bounds_p = (TYPE_MIN_VALUE (domain)
5112 && TYPE_MAX_VALUE (domain)
5113 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5114 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5116 /* If we have constant bounds for the range of the type, get them. */
5119 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5120 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5123 /* If the constructor has fewer elements than the array,
5124 clear the whole array first. Similarly if this is
5125 a static constructor of a non-BLKmode object. */
5126 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
5130 HOST_WIDE_INT count = 0, zero_count = 0;
5131 need_to_clear = ! const_bounds_p;
5133 /* This loop is a more accurate version of the loop in
5134 mostly_zeros_p (it handles RANGE_EXPR in an index).
5135 It is also needed to check for missing elements. */
5136 for (elt = CONSTRUCTOR_ELTS (exp);
5137 elt != NULL_TREE && ! need_to_clear;
5138 elt = TREE_CHAIN (elt))
5140 tree index = TREE_PURPOSE (elt);
5141 HOST_WIDE_INT this_node_count;
5143 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5145 tree lo_index = TREE_OPERAND (index, 0);
5146 tree hi_index = TREE_OPERAND (index, 1);
5148 if (! host_integerp (lo_index, 1)
5149 || ! host_integerp (hi_index, 1))
5155 this_node_count = (tree_low_cst (hi_index, 1)
5156 - tree_low_cst (lo_index, 1) + 1);
5159 this_node_count = 1;
5161 count += this_node_count;
5162 if (mostly_zeros_p (TREE_VALUE (elt)))
5163 zero_count += this_node_count;
5166 /* Clear the entire array first if there are any missing elements,
5167 or if the incidence of zero elements is >= 75%. */
5169 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
5173 if (need_to_clear && size > 0)
5178 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5180 clear_storage (target, GEN_INT (size));
5184 else if (REG_P (target))
5185 /* Inform later passes that the old value is dead. */
5186 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5188 /* Store each element of the constructor into
5189 the corresponding element of TARGET, determined
5190 by counting the elements. */
5191 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
5193 elt = TREE_CHAIN (elt), i++)
5195 enum machine_mode mode;
5196 HOST_WIDE_INT bitsize;
5197 HOST_WIDE_INT bitpos;
5199 tree value = TREE_VALUE (elt);
5200 tree index = TREE_PURPOSE (elt);
5201 rtx xtarget = target;
5203 if (cleared && is_zeros_p (value))
5206 unsignedp = TREE_UNSIGNED (elttype);
5207 mode = TYPE_MODE (elttype);
5208 if (mode == BLKmode)
5209 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5210 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5213 bitsize = GET_MODE_BITSIZE (mode);
5215 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5217 tree lo_index = TREE_OPERAND (index, 0);
5218 tree hi_index = TREE_OPERAND (index, 1);
5219 rtx index_r, pos_rtx, loop_end;
5220 struct nesting *loop;
5221 HOST_WIDE_INT lo, hi, count;
5224 /* If the range is constant and "small", unroll the loop. */
5226 && host_integerp (lo_index, 0)
5227 && host_integerp (hi_index, 0)
5228 && (lo = tree_low_cst (lo_index, 0),
5229 hi = tree_low_cst (hi_index, 0),
5230 count = hi - lo + 1,
5231 (GET_CODE (target) != MEM
5233 || (host_integerp (TYPE_SIZE (elttype), 1)
5234 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5237 lo -= minelt; hi -= minelt;
5238 for (; lo <= hi; lo++)
5240 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5242 if (GET_CODE (target) == MEM
5243 && !MEM_KEEP_ALIAS_SET_P (target)
5244 && TREE_CODE (type) == ARRAY_TYPE
5245 && TYPE_NONALIASED_COMPONENT (type))
5247 target = copy_rtx (target);
5248 MEM_KEEP_ALIAS_SET_P (target) = 1;
5251 store_constructor_field
5252 (target, bitsize, bitpos, mode, value, type, cleared,
5253 get_alias_set (elttype));
5258 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
5259 loop_end = gen_label_rtx ();
5261 unsignedp = TREE_UNSIGNED (domain);
5263 index = build_decl (VAR_DECL, NULL_TREE, domain);
5266 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5268 SET_DECL_RTL (index, index_r);
5269 if (TREE_CODE (value) == SAVE_EXPR
5270 && SAVE_EXPR_RTL (value) == 0)
5272 /* Make sure value gets expanded once before the
5273 loop. */
5274 expand_expr (value, const0_rtx, VOIDmode, 0);
5277 store_expr (lo_index, index_r, 0);
5278 loop = expand_start_loop (0);
5280 /* Assign value to element index. */
5281 position
5282 = convert (ssizetype,
5283 fold (build (MINUS_EXPR, TREE_TYPE (index),
5284 index, TYPE_MIN_VALUE (domain))));
5285 position = size_binop (MULT_EXPR, position,
5287 TYPE_SIZE_UNIT (elttype)));
5289 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
5290 xtarget = offset_address (target, pos_rtx,
5291 highest_pow2_factor (position));
5292 xtarget = adjust_address (xtarget, mode, 0);
5293 if (TREE_CODE (value) == CONSTRUCTOR)
5294 store_constructor (value, xtarget, cleared,
5295 bitsize / BITS_PER_UNIT);
5297 store_expr (value, xtarget, 0);
5299 expand_exit_loop_if_false (loop,
5300 build (LT_EXPR, integer_type_node,
5303 expand_increment (build (PREINCREMENT_EXPR,
5305 index, integer_one_node), 0, 0);
5307 emit_label (loop_end);
5310 else if ((index != 0 && ! host_integerp (index, 0))
5311 || ! host_integerp (TYPE_SIZE (elttype), 1))
5316 index = ssize_int (1);
5319 index = convert (ssizetype,
5320 fold (build (MINUS_EXPR, index,
5321 TYPE_MIN_VALUE (domain))));
5323 position = size_binop (MULT_EXPR, index,
5325 TYPE_SIZE_UNIT (elttype)));
5326 xtarget = offset_address (target,
5327 expand_expr (position, 0, VOIDmode, 0),
5328 highest_pow2_factor (position));
5329 xtarget = adjust_address (xtarget, mode, 0);
5330 store_expr (value, xtarget, 0);
5335 bitpos = ((tree_low_cst (index, 0) - minelt)
5336 * tree_low_cst (TYPE_SIZE (elttype), 1));
5338 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5340 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
5341 && TREE_CODE (type) == ARRAY_TYPE
5342 && TYPE_NONALIASED_COMPONENT (type))
5344 target = copy_rtx (target);
5345 MEM_KEEP_ALIAS_SET_P (target) = 1;
5348 store_constructor_field (target, bitsize, bitpos, mode, value,
5349 type, cleared, get_alias_set (elttype));
5355 /* Set constructor assignments. */
5356 else if (TREE_CODE (type) == SET_TYPE)
5358 tree elt = CONSTRUCTOR_ELTS (exp);
5359 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
5360 tree domain = TYPE_DOMAIN (type);
5361 tree domain_min, domain_max, bitlength;
5363 /* The default implementation strategy is to extract the constant
5364 parts of the constructor, use that to initialize the target,
5365 and then "or" in whatever non-constant ranges we need in addition.
5367 If a large set is all zero or all ones, it is
5368 probably better to set it using memset (if available) or bzero.
5369 Also, if a large set has just a single range, it may also be
5370 better to first clear the whole set (using
5371 bzero/memset), and then set the bits we want. */
5373 /* Check for all zeros. */
5374 if (elt == NULL_TREE && size > 0)
5377 clear_storage (target, GEN_INT (size));
5381 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
5382 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
5383 bitlength = size_binop (PLUS_EXPR,
5384 size_diffop (domain_max, domain_min),
5387 nbits = tree_low_cst (bitlength, 1);
5389 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
5390 are "complicated" (more than one range), initialize (the
5391 constant parts) by copying from a constant. */
5392 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
5393 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
5395 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
5396 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
5397 char *bit_buffer = (char *) alloca (nbits);
5398 HOST_WIDE_INT word = 0;
5399 unsigned int bit_pos = 0;
5400 unsigned int ibit = 0;
5401 unsigned int offset = 0; /* In bytes from beginning of set. */
5403 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
5406 if (bit_buffer[ibit])
5408 if (BYTES_BIG_ENDIAN)
5409 word |= (1 << (set_word_size - 1 - bit_pos));
5411 word |= 1 << bit_pos;
5415 if (bit_pos >= set_word_size || ibit == nbits)
5417 if (word != 0 || ! cleared)
5419 rtx datum = GEN_INT (word);
5422 /* The assumption here is that it is safe to use
5423 XEXP if the set is multi-word, but not if
5424 it's single-word. */
5425 if (GET_CODE (target) == MEM)
5426 to_rtx = adjust_address (target, mode, offset);
5427 else if (offset == 0)
5431 emit_move_insn (to_rtx, datum);
5438 offset += set_word_size / BITS_PER_UNIT;
5443 /* Don't bother clearing storage if the set is all ones. */
5444 if (TREE_CHAIN (elt) != NULL_TREE
5445 || (TREE_PURPOSE (elt) == NULL_TREE
5447 : ( ! host_integerp (TREE_VALUE (elt), 0)
5448 || ! host_integerp (TREE_PURPOSE (elt), 0)
5449 || (tree_low_cst (TREE_VALUE (elt), 0)
5450 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5451 != (HOST_WIDE_INT) nbits))))
5452 clear_storage (target, expr_size (exp));
5454 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5456 /* Start of range of element or NULL. */
5457 tree startbit = TREE_PURPOSE (elt);
5458 /* End of range of element, or element value. */
5459 tree endbit = TREE_VALUE (elt);
5460 HOST_WIDE_INT startb, endb;
5461 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5463 bitlength_rtx = expand_expr (bitlength,
5464 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5466 /* Handle non-range tuple element like [ expr ]. */
5467 if (startbit == NULL_TREE)
5469 startbit = save_expr (endbit);
5473 startbit = convert (sizetype, startbit);
5474 endbit = convert (sizetype, endbit);
5475 if (! integer_zerop (domain_min))
5477 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5478 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5480 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5481 EXPAND_CONST_ADDRESS);
5482 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5483 EXPAND_CONST_ADDRESS);
5489 ((build_qualified_type ((*lang_hooks.types.type_for_mode)
5490 (GET_MODE (target), 0),
5493 emit_move_insn (targetx, target);
5496 else if (GET_CODE (target) == MEM)
5501 /* Optimization: If startbit and endbit are constants divisible
5502 by BITS_PER_UNIT, call memset instead. */
5503 if (TARGET_MEM_FUNCTIONS
5504 && TREE_CODE (startbit) == INTEGER_CST
5505 && TREE_CODE (endbit) == INTEGER_CST
5506 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5507 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5509 emit_library_call (memset_libfunc, LCT_NORMAL,
5511 plus_constant (XEXP (targetx, 0),
5512 startb / BITS_PER_UNIT),
5514 constm1_rtx, TYPE_MODE (integer_type_node),
5515 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5516 TYPE_MODE (sizetype));
5519 emit_library_call (setbits_libfunc, LCT_NORMAL,
5520 VOIDmode, 4, XEXP (targetx, 0),
5521 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5522 startbit_rtx, TYPE_MODE (sizetype),
5523 endbit_rtx, TYPE_MODE (sizetype));
5526 emit_move_insn (target, targetx);
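/* Illustrative sketch, not part of this file: how the constant part of
   a set constructor is packed into host words above.  On big-endian
   targets bit 0 of the set occupies the most significant position of
   the word, hence the mirrored shift; compare the BYTES_BIG_ENDIAN
   test in the loop above.  Assumes word_size is at most the number of
   bits in an unsigned long.  */
static unsigned long
sketch_pack_set_word (const char *bit_buffer, unsigned nbits,
                      unsigned word_size, int big_endian)
{
  unsigned long word = 0;
  unsigned i;

  for (i = 0; i < nbits && i < word_size; i++)
    if (bit_buffer[i])
      word |= big_endian ? 1UL << (word_size - 1 - i) : 1UL << i;

  return word;
}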
5534 /* Store the value of EXP (an expression tree)
5535 into a subfield of TARGET which has mode MODE and occupies
5536 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5537 If MODE is VOIDmode, it means that we are storing into a bit-field.
5539 If VALUE_MODE is VOIDmode, return nothing in particular.
5540 UNSIGNEDP is not used in this case.
5542 Otherwise, return an rtx for the value stored. This rtx
5543 has mode VALUE_MODE if that is convenient to do.
5544 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5546 TYPE is the type of the underlying object,
5548 ALIAS_SET is the alias set for the destination. This value will
5549 (in general) be different from that for TARGET, since TARGET is a
5550 reference to the containing structure. */
5553 store_field (target, bitsize, bitpos, mode, exp, value_mode, unsignedp, type,
5556 HOST_WIDE_INT bitsize;
5557 HOST_WIDE_INT bitpos;
5558 enum machine_mode mode;
5560 enum machine_mode value_mode;
5565 HOST_WIDE_INT width_mask = 0;
5567 if (TREE_CODE (exp) == ERROR_MARK)
5570 /* If we have nothing to store, do nothing unless the expression has
5571 side-effects. */
5572 if (bitsize == 0)
5573 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5574 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5575 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5577 /* If we are storing into an unaligned field of an aligned union that is
5578 in a register, we may have the mode of TARGET being an integer mode but
5579 MODE == BLKmode. In that case, get an aligned object whose size and
5580 alignment are the same as TARGET and store TARGET into it (we can avoid
5581 the store if the field being stored is the entire width of TARGET). Then
5582 call ourselves recursively to store the field into a BLKmode version of
5583 that object. Finally, load from the object into TARGET. This is not
5584 very efficient in general, but should only be slightly more expensive
5585 than the otherwise-required unaligned accesses. Perhaps this can be
5586 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5587 twice, once with emit_move_insn and once via store_field. */
5590 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5592 rtx object = assign_temp (type, 0, 1, 1);
5593 rtx blk_object = adjust_address (object, BLKmode, 0);
5595 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5596 emit_move_insn (object, target);
5598 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5601 emit_move_insn (target, object);
5603 /* We want to return the BLKmode version of the data. */
5607 if (GET_CODE (target) == CONCAT)
5609 /* We're storing into a struct containing a single __complex. */
5613 return store_expr (exp, target, 0);
5616 /* If the structure is in a register or if the component
5617 is a bit field, we cannot use addressing to access it.
5618 Use bit-field techniques or SUBREG to store in it. */
5620 if (mode == VOIDmode
5621 || (mode != BLKmode && ! direct_store[(int) mode]
5622 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5623 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5624 || GET_CODE (target) == REG
5625 || GET_CODE (target) == SUBREG
5626 /* If the field isn't aligned enough to store as an ordinary memref,
5627 store it as a bit field. */
5629 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5630 || bitpos % GET_MODE_ALIGNMENT (mode))
5631 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5632 || (bitpos % BITS_PER_UNIT != 0)))
5633 /* If the RHS and field are a constant size and the size of the
5634 RHS isn't the same size as the bitfield, we must use bitfield
5635 operations. */
5637 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5638 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5640 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5642 /* If BITSIZE is narrower than the size of the type of EXP
5643 we will be narrowing TEMP. Normally, what's wanted are the
5644 low-order bits. However, if EXP's type is a record and this is a
5645 big-endian machine, we want the upper BITSIZE bits. */
5646 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5647 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5648 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5649 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5650 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5654 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5655 MODE. */
5656 if (mode != VOIDmode && mode != BLKmode
5657 && mode != TYPE_MODE (TREE_TYPE (exp)))
5658 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5660 /* If the modes of TARGET and TEMP are both BLKmode, both
5661 must be in memory and BITPOS must be aligned on a byte
5662 boundary. If so, we simply do a block copy. */
5663 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5665 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5666 || bitpos % BITS_PER_UNIT != 0)
5669 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5670 emit_block_move (target, temp,
5671 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5675 return value_mode == VOIDmode ? const0_rtx : target;
5678 /* Store the value in the bitfield. */
5679 store_bit_field (target, bitsize, bitpos, mode, temp,
5680 int_size_in_bytes (type));
5682 if (value_mode != VOIDmode)
5684 /* The caller wants an rtx for the value.
5685 If possible, avoid refetching from the bitfield itself. */
5687 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5690 enum machine_mode tmode;
5692 tmode = GET_MODE (temp);
5693 if (tmode == VOIDmode)
5697 return expand_and (tmode, temp,
5698 gen_int_mode (width_mask, tmode),
5701 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5702 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5703 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5706 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5707 NULL_RTX, value_mode, VOIDmode,
5708 int_size_in_bytes (type));
5714 rtx addr = XEXP (target, 0);
5715 rtx to_rtx = target;
5717 /* If a value is wanted, it must be the lhs;
5718 so make the address stable for multiple use. */
5720 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5721 && ! CONSTANT_ADDRESS_P (addr)
5722 /* A frame-pointer reference is already stable. */
5723 && ! (GET_CODE (addr) == PLUS
5724 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5725 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5726 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5727 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5729 /* Now build a reference to just the desired component. */
5731 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5733 if (to_rtx == target)
5734 to_rtx = copy_rtx (to_rtx);
5736 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5737 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5738 set_mem_alias_set (to_rtx, alias_set);
5740 return store_expr (exp, to_rtx, value_mode != VOIDmode);
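/* Illustrative sketch, not part of this file: the signed refetch path
   above widens the field with a left shift and smears the sign bit
   back down with a right shift, while the unsigned path masks with
   WIDTH_MASK = ((HOST_WIDE_INT) 1 << bitsize) - 1.  A portable
   standard-C equivalent of the signed case, assuming
   0 < bitsize < number of bits in long:  */
static long
sketch_sign_extend_field (unsigned long field, int bitsize)
{
  unsigned long sign = 1UL << (bitsize - 1);

  field &= (1UL << bitsize) - 1;	    /* apply width_mask */
  return (long) (field ^ sign) - (long) sign; /* portable sign extension */
}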
5744 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5745 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5746 codes and find the ultimate containing object, which we return.
5748 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5749 bit position, and *PUNSIGNEDP to the signedness of the field.
5750 If the position of the field is variable, we store a tree
5751 giving the variable offset (in units) in *POFFSET.
5752 This offset is in addition to the bit position.
5753 If the position is not variable, we store 0 in *POFFSET.
5755 If any of the extraction expressions is volatile,
5756 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5758 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5759 is a mode that can be used to access the field. In that case, *PBITSIZE
5760 is redundant.
5762 If the field describes a variable-sized object, *PMODE is set to
5763 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5764 this case, but the address of the object can be found. */
5767 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5768 punsignedp, pvolatilep)
5770 HOST_WIDE_INT *pbitsize;
5771 HOST_WIDE_INT *pbitpos;
5773 enum machine_mode *pmode;
5778 enum machine_mode mode = VOIDmode;
5779 tree offset = size_zero_node;
5780 tree bit_offset = bitsize_zero_node;
5781 tree placeholder_ptr = 0;
5784 /* First get the mode, signedness, and size. We do this from just the
5785 outermost expression. */
5786 if (TREE_CODE (exp) == COMPONENT_REF)
5788 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5789 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5790 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5792 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5794 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5796 size_tree = TREE_OPERAND (exp, 1);
5797 *punsignedp = TREE_UNSIGNED (exp);
5801 mode = TYPE_MODE (TREE_TYPE (exp));
5802 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5804 if (mode == BLKmode)
5805 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5807 *pbitsize = GET_MODE_BITSIZE (mode);
5812 if (! host_integerp (size_tree, 1))
5813 mode = BLKmode, *pbitsize = -1;
5815 *pbitsize = tree_low_cst (size_tree, 1);
5818 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5819 and find the ultimate containing object. */
5822 if (TREE_CODE (exp) == BIT_FIELD_REF)
5823 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5824 else if (TREE_CODE (exp) == COMPONENT_REF)
5826 tree field = TREE_OPERAND (exp, 1);
5827 tree this_offset = DECL_FIELD_OFFSET (field);
5829 /* If this field hasn't been filled in yet, don't go
5830 past it. This should only happen when folding expressions
5831 made during type construction. */
5832 if (this_offset == 0)
5834 else if (! TREE_CONSTANT (this_offset)
5835 && contains_placeholder_p (this_offset))
5836 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5838 offset = size_binop (PLUS_EXPR, offset, this_offset);
5839 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5840 DECL_FIELD_BIT_OFFSET (field));
5842 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5845 else if (TREE_CODE (exp) == ARRAY_REF
5846 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5848 tree index = TREE_OPERAND (exp, 1);
5849 tree array = TREE_OPERAND (exp, 0);
5850 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5851 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5852 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5854 /* We assume all arrays have sizes that are a multiple of a byte.
5855 First subtract the lower bound, if any, in the type of the
5856 index, then convert to sizetype and multiply by the size of the
5857 element. */
5858 if (low_bound != 0 && ! integer_zerop (low_bound))
5859 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5862 /* If the index has a self-referential type, pass it to a
5863 WITH_RECORD_EXPR; if the component size is self-referential, pass
5864 our component to one. */
5865 if (! TREE_CONSTANT (index)
5866 && contains_placeholder_p (index))
5867 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5868 if (! TREE_CONSTANT (unit_size)
5869 && contains_placeholder_p (unit_size))
5870 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5872 offset = size_binop (PLUS_EXPR, offset,
5873 size_binop (MULT_EXPR,
5874 convert (sizetype, index),
5878 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5880 tree new = find_placeholder (exp, &placeholder_ptr);
5882 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5883 We might have been called from tree optimization where we
5884 haven't set up an object yet. */
5893 /* We can go inside most conversions: all NON_LVALUE_EXPRs, all normal
5894 conversions that don't change the mode, and all view conversions
5895 except those that need to "step up" the alignment. */
5896 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5897 && ! (TREE_CODE (exp) == VIEW_CONVERT_EXPR
5898 && ! ((TYPE_ALIGN (TREE_TYPE (exp))
5899 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5901 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5902 < BIGGEST_ALIGNMENT)
5903 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5904 || TYPE_ALIGN_OK (TREE_TYPE
5905 (TREE_OPERAND (exp, 0))))))
5906 && ! ((TREE_CODE (exp) == NOP_EXPR
5907 || TREE_CODE (exp) == CONVERT_EXPR)
5908 && (TYPE_MODE (TREE_TYPE (exp))
5909 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5912 /* If any reference in the chain is volatile, the effect is volatile. */
5913 if (TREE_THIS_VOLATILE (exp))
5916 exp = TREE_OPERAND (exp, 0);
5919 /* If OFFSET is constant, see if we can return the whole thing as a
5920 constant bit position. Otherwise, split it up. */
5921 if (host_integerp (offset, 0)
5922 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5924 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5925 && host_integerp (tem, 0))
5926 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5928 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
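/* Illustrative sketch, not part of this file: the final split above.
   When every component offset is constant, the whole access collapses
   into *PBITPOS; otherwise the variable bytes stay behind in *POFFSET.
   For a constant a[i] with lower bound LB and an element of ES bytes:  */
static void
sketch_array_bitpos (long i, long lb, long es_bytes, long *pbitpos)
{
  /* (i - lb) elements, ES_BYTES bytes each, 8 bits per byte.  */
  *pbitpos = (i - lb) * es_bytes * 8;
}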
5934 /* Return 1 if T is an expression that get_inner_reference handles. */
5937 handled_component_p (t)
5940 switch (TREE_CODE (t))
5945 case ARRAY_RANGE_REF:
5946 case NON_LVALUE_EXPR:
5947 case VIEW_CONVERT_EXPR:
5952 return (TYPE_MODE (TREE_TYPE (t))
5953 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5960 /* Given an rtx VALUE that may contain additions and multiplications, return
5961 an equivalent value that just refers to a register, memory, or constant.
5962 This is done by generating instructions to perform the arithmetic and
5963 returning a pseudo-register containing the value.
5965 The returned value may be a REG, SUBREG, MEM or constant. */
5968 force_operand (value, target)
5972 /* Use subtarget as the target for operand 0 of a binary operation. */
5973 rtx subtarget = get_subtarget (target);
5974 enum rtx_code code = GET_CODE (value);
5976 /* Check for a PIC address load. */
5977 if ((code == PLUS || code == MINUS)
5978 && XEXP (value, 0) == pic_offset_table_rtx
5979 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5980 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5981 || GET_CODE (XEXP (value, 1)) == CONST))
5984 subtarget = gen_reg_rtx (GET_MODE (value));
5985 emit_move_insn (subtarget, value);
5989 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5992 target = gen_reg_rtx (GET_MODE (value));
5993 convert_move (target, force_operand (XEXP (value, 0), NULL),
5994 code == ZERO_EXTEND);
5998 if (GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
6000 op2 = XEXP (value, 1);
6001 if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
6003 if (code == MINUS && GET_CODE (op2) == CONST_INT)
6006 op2 = negate_rtx (GET_MODE (value), op2);
6009 /* Check for an addition with OP2 a constant integer and our first
6010 operand a PLUS of a virtual register and something else. In that
6011 case, we want to emit the sum of the virtual register and the
6012 constant first and then add the other value. This allows virtual
6013 register instantiation to simply modify the constant rather than
6014 creating another one around this addition. */
6015 if (code == PLUS && GET_CODE (op2) == CONST_INT
6016 && GET_CODE (XEXP (value, 0)) == PLUS
6017 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
6018 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
6019 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
6021 rtx temp = expand_simple_binop (GET_MODE (value), code,
6022 XEXP (XEXP (value, 0), 0), op2,
6023 subtarget, 0, OPTAB_LIB_WIDEN);
6024 return expand_simple_binop (GET_MODE (value), code, temp,
6025 force_operand (XEXP (XEXP (value,
6027 target, 0, OPTAB_LIB_WIDEN);
6030 op1 = force_operand (XEXP (value, 0), subtarget);
6031 op2 = force_operand (op2, NULL_RTX);
6035 return expand_mult (GET_MODE (value), op1, op2, target, 1);
6037 if (!INTEGRAL_MODE_P (GET_MODE (value)))
6038 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6039 target, 1, OPTAB_LIB_WIDEN);
6041 return expand_divmod (0,
6042 FLOAT_MODE_P (GET_MODE (value))
6043 ? RDIV_EXPR : TRUNC_DIV_EXPR,
6044 GET_MODE (value), op1, op2, target, 0);
6047 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6051 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
6055 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6059 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6060 target, 0, OPTAB_LIB_WIDEN);
6063 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6064 target, 1, OPTAB_LIB_WIDEN);
6067 if (GET_RTX_CLASS (code) == '1')
6069 op1 = force_operand (XEXP (value, 0), NULL_RTX);
6070 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
6073 #ifdef INSN_SCHEDULING
6074 /* On machines that have insn scheduling, we want all memory references to be
6075 explicit, so we need to deal with such paradoxical SUBREGs. */
6076 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
6077 && (GET_MODE_SIZE (GET_MODE (value))
6078 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
6079 value
6080 = simplify_gen_subreg (GET_MODE (value),
6081 force_reg (GET_MODE (SUBREG_REG (value)),
6082 force_operand (SUBREG_REG (value),
6084 GET_MODE (SUBREG_REG (value)),
6085 SUBREG_BYTE (value));
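/* Illustrative sketch, not part of this file: force_operand's MINUS
   canonicalization above.  Rewriting x - c as x + (-c) means every
   constant adjustment becomes a PLUS, which later passes (e.g.
   virtual register instantiation) can fold by simply editing the
   constant rather than introducing a new operation.  */
static long
sketch_canonical_minus (long x, long c)
{
  return x + (-c);		/* code = PLUS, op2 = negate_rtx (..., c) */
}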
6091 /* Subroutine of expand_expr: return nonzero iff there is no way that
6092 EXP can reference X, which is being modified. TOP_P is nonzero if this
6093 call is going to be used to determine whether we need a temporary
6094 for EXP, as opposed to a recursive call to this function.
6096 It is always safe for this routine to return zero since it merely
6097 searches for optimization opportunities. */
6100 safe_from_p (x, exp, top_p)
6107 static tree save_expr_list;
6110 /* If EXP has varying size, we MUST use a target since we currently
6111 have no way of allocating temporaries of variable size
6112 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6113 So we assume here that something at a higher level has prevented a
6114 clash. This is somewhat bogus, but the best we can do. Only
6115 do this when X is BLKmode and when we are at the top level. */
6116 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6117 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
6118 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
6119 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
6120 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
6122 && GET_MODE (x) == BLKmode)
6123 /* If X is in the outgoing argument area, it is always safe. */
6124 || (GET_CODE (x) == MEM
6125 && (XEXP (x, 0) == virtual_outgoing_args_rtx
6126 || (GET_CODE (XEXP (x, 0)) == PLUS
6127 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
6130 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
6131 find the underlying pseudo. */
6132 if (GET_CODE (x) == SUBREG)
6135 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6139 /* A SAVE_EXPR might appear many times in the expression passed to the
6140 top-level safe_from_p call, and if it has a complex subexpression,
6141 examining it multiple times could result in a combinatorial explosion.
6142 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
6143 with optimization took about 28 minutes to compile -- even though it was
6144 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
6145 and turn that off when we are done. We keep a list of the SAVE_EXPRs
6146 we have processed. Note that the only test of top_p was above. */
6155 rtn = safe_from_p (x, exp, 0);
6157 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
6158 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
6163 /* Now look at our tree code and possibly recurse. */
6164 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6167 exp_rtl = DECL_RTL_IF_SET (exp);
6174 if (TREE_CODE (exp) == TREE_LIST)
6178 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
6180 exp = TREE_CHAIN (exp);
6183 if (TREE_CODE (exp) != TREE_LIST)
6184 return safe_from_p (x, exp, 0);
6187 else if (TREE_CODE (exp) == ERROR_MARK)
6188 return 1; /* An already-visited SAVE_EXPR? */
6194 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6199 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6203 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6204 the expression. If it is set, we conflict iff we are that rtx or
6205 both are in memory. Otherwise, we check all operands of the
6206 expression recursively. */
6208 switch (TREE_CODE (exp))
6211 /* If the operand is static or we are static, we can't conflict.
6212 Likewise if we don't conflict with the operand at all. */
6213 if (staticp (TREE_OPERAND (exp, 0))
6214 || TREE_STATIC (exp)
6215 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6218 /* Otherwise, the only way this can conflict is if we are taking
6219 the address of a DECL whose address is part of X, which is
6220 very rare. */
6221 exp = TREE_OPERAND (exp, 0);
6224 if (!DECL_RTL_SET_P (exp)
6225 || GET_CODE (DECL_RTL (exp)) != MEM)
6228 exp_rtl = XEXP (DECL_RTL (exp), 0);
6233 if (GET_CODE (x) == MEM
6234 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6235 get_alias_set (exp)))
6240 /* Assume that the call will clobber all hard registers and
6241 all of memory. */
6242 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6243 || GET_CODE (x) == MEM)
6248 /* If a sequence exists, we would have to scan every instruction
6249 in the sequence to see if it was safe. This is probably not
6250 worthwhile. */
6251 if (RTL_EXPR_SEQUENCE (exp))
6254 exp_rtl = RTL_EXPR_RTL (exp);
6257 case WITH_CLEANUP_EXPR:
6258 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
6261 case CLEANUP_POINT_EXPR:
6262 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6265 exp_rtl = SAVE_EXPR_RTL (exp);
6269 /* If we've already scanned this, don't do it again. Otherwise,
6270 show we've scanned it and record for clearing the flag if we're
6272 if (TREE_PRIVATE (exp))
6275 TREE_PRIVATE (exp) = 1;
6276 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6278 TREE_PRIVATE (exp) = 0;
6282 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
6286 /* The only operand we look at is operand 1. The rest aren't
6287 part of the expression. */
6288 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
6290 case METHOD_CALL_EXPR:
6291 /* This takes an rtx argument, but shouldn't appear here. */
6298 /* If we have an rtx, we do not need to scan our operands. */
6302 nops = first_rtl_op (TREE_CODE (exp));
6303 for (i = 0; i < nops; i++)
6304 if (TREE_OPERAND (exp, i) != 0
6305 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6308 /* If this is a language-specific tree code, it may require
6309 special handling. */
6310 if ((unsigned int) TREE_CODE (exp)
6311 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
6312 && !(*lang_hooks.safe_from_p) (x, exp))
6316 /* If we have an rtl, find any enclosed object. Then see if we conflict
6317 with it. */
6320 if (GET_CODE (exp_rtl) == SUBREG)
6322 exp_rtl = SUBREG_REG (exp_rtl);
6323 if (GET_CODE (exp_rtl) == REG
6324 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6328 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6329 are memory and they conflict. */
6330 return ! (rtx_equal_p (x, exp_rtl)
6331 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
6332 && true_dependence (exp_rtl, VOIDmode, x,
6333 rtx_addr_varies_p)));
6336 /* If we reach here, it is safe. */
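/* Illustrative sketch, not part of this file: why SAVE_EXPRs are
   marked with TREE_PRIVATE above.  Shared subtrees make the tree a
   DAG; without a visited flag the walk is exponential in the sharing
   depth, with it the walk is linear.  The caller then clears the
   flags, as the save_expr_list loop above does.  The node type below
   is hypothetical.  */
struct sketch_dag
{
  struct sketch_dag *kid[2];
  int visited;			/* plays the role of TREE_PRIVATE */
};

static int
sketch_safe_walk (struct sketch_dag *n)
{
  if (n == 0 || n->visited)
    return 1;			/* already scanned: treat as safe */
  n->visited = 1;
  return sketch_safe_walk (n->kid[0]) && sketch_safe_walk (n->kid[1]);
}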
6340 /* Subroutine of expand_expr: return rtx if EXP is a
6341 variable or parameter; else return 0. */
6348 switch (TREE_CODE (exp))
6352 return DECL_RTL (exp);
6358 #ifdef MAX_INTEGER_COMPUTATION_MODE
6361 check_max_integer_computation_mode (exp)
6364 enum tree_code code;
6365 enum machine_mode mode;
6367 /* Strip any NOPs that don't change the mode. */
6369 code = TREE_CODE (exp);
6371 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
6372 if (code == NOP_EXPR
6373 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
6376 /* First check the type of the overall operation. We need only look at
6377 unary, binary and relational operations. */
6378 if (TREE_CODE_CLASS (code) == '1'
6379 || TREE_CODE_CLASS (code) == '2'
      || TREE_CODE_CLASS (code) == '<')
    {
      mode = TYPE_MODE (TREE_TYPE (exp));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	internal_error ("unsupported wide integer operation");
    }
  /* Check operand of a unary op.  */
  if (TREE_CODE_CLASS (code) == '1')
    {
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	internal_error ("unsupported wide integer operation");
    }
  /* Check operands of a binary/comparison op.  */
  if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
    {
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	internal_error ("unsupported wide integer operation");

      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	internal_error ("unsupported wide integer operation");
    }
}
#endif
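/* Note on the checks above: within a mode class, machine modes are
   declared in order of increasing width, so the comparison
   "mode > MAX_INTEGER_COMPUTATION_MODE" is effectively a width test.
   For example, if MAX_INTEGER_COMPUTATION_MODE were SImode, expanding
   a DImode multiply would trigger the internal error.  */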
/* Return the highest power of two that EXP is known to be a multiple of.
   This is used in updating alignment of MEMs in array references.  */

static unsigned HOST_WIDE_INT
highest_pow2_factor (exp)
     tree exp;
{
  unsigned HOST_WIDE_INT c0, c1;
  switch (TREE_CODE (exp))
    {
    case INTEGER_CST:
      /* We can find the lowest bit that's a one.  If the low
	 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
	 We need to handle this case since we can find it in a COND_EXPR,
	 a MIN_EXPR, or a MAX_EXPR.  If the constant overflows, we have an
	 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
	 later ICE.  */
      if (TREE_CONSTANT_OVERFLOW (exp))
	return BIGGEST_ALIGNMENT;
      else
	{
	  /* Note: tree_low_cst is intentionally not used here,
	     we don't care about the upper bits.  */
	  c0 = TREE_INT_CST_LOW (exp);
	  c0 &= -c0;
	  return c0 ? c0 : BIGGEST_ALIGNMENT;
	}
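      /* Worked example: for the constant 48 (binary 110000), c0 & -c0
	 isolates the lowest set bit and yields 16, the largest power of
	 two dividing 48; any multiple of 48 is therefore known to be a
	 multiple of 16.  */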
6443 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6444 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6445 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6446 return MIN (c0, c1);
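      /* Worked example: for (n * 12) + 8 this returns MIN (4, 8) = 4,
	 since a multiple of 4 plus a multiple of 8 is only guaranteed
	 to be a multiple of 4.  */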
    case MULT_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      return c0 * c1;
    case ROUND_DIV_EXPR:  case TRUNC_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
      if (integer_pow2p (TREE_OPERAND (exp, 1))
	  && host_integerp (TREE_OPERAND (exp, 1), 1))
	{
	  c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
	  c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
	  return MAX (1, c0 / c1);
	}
      break;
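      /* Worked example: (n * 16) / 4, with N unknown, gives
	 MAX (1, 16 / 4) = 4.  */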
6464 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6465 case SAVE_EXPR: case WITH_RECORD_EXPR:
6466 return highest_pow2_factor (TREE_OPERAND (exp, 0));
    case COMPOUND_EXPR:
      return highest_pow2_factor (TREE_OPERAND (exp, 1));

    case COND_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
      return MIN (c0, c1);

    default:
      break;
    }

  return 1;
}
/* Similar, except that it is known that the expression must be a multiple
   of the alignment of TYPE.  */

static unsigned HOST_WIDE_INT
highest_pow2_factor_for_type (type, exp)
     tree type;
     tree exp;
{
  unsigned HOST_WIDE_INT type_align, factor;

  factor = highest_pow2_factor (exp);
  type_align = TYPE_ALIGN (type) / BITS_PER_UNIT;
  return MAX (factor, type_align);
}
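/* Example: for EXP = n * 2 accessed through a type with 4-byte alignment,
   highest_pow2_factor proves only a factor of 2, but the type's alignment
   raises the result to 4.  */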
6498 /* Return an object on the placeholder list that matches EXP, a
6499 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
6500 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
6501 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
6502 is a location which initially points to a starting location in the
6503 placeholder list (zero means start of the list) and where a pointer into
6504 the placeholder list at which the object is found is placed. */
tree
find_placeholder (exp, plist)
     tree exp;
     tree *plist;
{
  tree type = TREE_TYPE (exp);
  tree placeholder_expr;

  for (placeholder_expr
       = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
       placeholder_expr != 0;
       placeholder_expr = TREE_CHAIN (placeholder_expr))
    {
      tree need_type = TYPE_MAIN_VARIANT (type);
      tree elt;
      /* Find the outermost reference that is of the type we want.  If none,
	 see if any object has a type that is a pointer to the type we
	 want.  */
6525 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6526 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6527 || TREE_CODE (elt) == COND_EXPR)
6528 ? TREE_OPERAND (elt, 1)
6529 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6530 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6531 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6532 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6533 ? TREE_OPERAND (elt, 0) : 0))
	if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
	  {
	    if (plist)
	      *plist = placeholder_expr;
	    return elt;
	  }
      for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
	   elt
	   = ((TREE_CODE (elt) == COMPOUND_EXPR
6544 || TREE_CODE (elt) == COND_EXPR)
6545 ? TREE_OPERAND (elt, 1)
6546 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6547 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6548 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6549 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6550 ? TREE_OPERAND (elt, 0) : 0))
	if (POINTER_TYPE_P (TREE_TYPE (elt))
	    && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
		== need_type))
	  {
	    if (plist)
	      *plist = placeholder_expr;
	    return build1 (INDIRECT_REF, need_type, elt);
	  }
    }

  return 0;
}
6564 /* expand_expr: generate code for computing expression EXP.
6565 An rtx for the computed value is returned. The value is never null.
6566 In the case of a void EXP, const0_rtx is returned.
6568 The value may be stored in TARGET if TARGET is nonzero.
6569 TARGET is just a suggestion; callers must assume that
6570 the rtx returned may not be the same as TARGET.
6572 If TARGET is CONST0_RTX, it means that the value will be ignored.
6574 If TMODE is not VOIDmode, it suggests generating the
6575 result in mode TMODE. But this is done only when convenient.
   Otherwise, TMODE is ignored and the value is generated in its natural mode.
6577 TMODE is just a suggestion; callers must assume that
6578 the rtx returned may not have mode TMODE.
6580 Note that TARGET may have neither TMODE nor MODE. In that case, it
6581 probably will not be used.
6583 If MODIFIER is EXPAND_SUM then when EXP is an addition
6584 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6585 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6586 products as above, or REG or MEM, or constant.
6587 Ordinarily in such cases we would output mul or add instructions
6588 and then return a pseudo reg containing the sum.
6590 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6591 it also marks a label as absolutely required (it can't be dead).
6592 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6593 This is used for outputting expressions used in initializers.
6595 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6596 with a constant address even if that address is not normally legitimate.
6597 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6599 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6600 a call parameter. Such targets require special care as we haven't yet
6601 marked TARGET so that it's safe from being trashed by libcalls. We
   don't want to use TARGET for anything but the final result;
   intermediate values must go elsewhere.  Additionally, calls to
6604 emit_block_move will be flagged with BLOCK_OP_CALL_PARM. */
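/* Sketch of the common calling pattern (TARGET and TMODE are only hints):

       temp = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);

   which lets the expander choose both the result rtx and its mode.  */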
rtx
expand_expr (exp, target, tmode, modifier)
     tree exp;
     rtx target;
     enum machine_mode tmode;
     enum expand_modifier modifier;
{
  rtx op0, op1, temp;
6614 tree type = TREE_TYPE (exp);
6615 int unsignedp = TREE_UNSIGNED (type);
6616 enum machine_mode mode;
6617 enum tree_code code = TREE_CODE (exp);
  rtx subtarget, original_target;
  int ignore;
  tree context;
6623 /* Handle ERROR_MARK before anybody tries to access its type. */
  if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
    {
      op0 = CONST0_RTX (tmode);
      if (op0 != 0)
	return op0;
      else
	return const0_rtx;
    }
6632 mode = TYPE_MODE (type);
6633 /* Use subtarget as the target for operand 0 of a binary operation. */
6634 subtarget = get_subtarget (target);
6635 original_target = target;
6636 ignore = (target == const0_rtx
6637 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6638 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6639 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6640 && TREE_CODE (type) == VOID_TYPE));
6642 /* If we are going to ignore this result, we need only do something
6643 if there is a side-effect somewhere in the expression. If there
6644 is, short-circuit the most common cases here. Note that we must
6645 not call expand_expr with anything but const0_rtx in case this
6646 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
  if (ignore)
    {
      if (! TREE_SIDE_EFFECTS (exp))
	return const0_rtx;
6653 /* Ensure we reference a volatile object even if value is ignored, but
6654 don't do this if all we are doing is taking its address. */
6655 if (TREE_THIS_VOLATILE (exp)
6656 && TREE_CODE (exp) != FUNCTION_DECL
6657 && mode != VOIDmode && mode != BLKmode
6658 && modifier != EXPAND_CONST_ADDRESS)
	{
	  temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
	  if (GET_CODE (temp) == MEM)
	    temp = copy_to_reg (temp);
	  return const0_rtx;
	}
6666 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6667 || code == INDIRECT_REF || code == BUFFER_REF)
	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
			    modifier);
6671 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6672 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6674 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
	  expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
	  return const0_rtx;
	}
6678 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6679 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
	/* If the second operand has no side effects, just evaluate
	   the first.  */
	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
			    modifier);
6684 else if (code == BIT_FIELD_REF)
6686 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6687 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
	  expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
	  return const0_rtx;
	}

      target = 0;
    }
6695 #ifdef MAX_INTEGER_COMPUTATION_MODE
6696 /* Only check stuff here if the mode we want is different from the mode
6697 of the expression; if it's the same, check_max_integer_computation_mode
6698 will handle it. Do we really need to check this stuff at all? */
  if (target
      && GET_MODE (target) != mode
6702 && TREE_CODE (exp) != INTEGER_CST
6703 && TREE_CODE (exp) != PARM_DECL
6704 && TREE_CODE (exp) != ARRAY_REF
6705 && TREE_CODE (exp) != ARRAY_RANGE_REF
6706 && TREE_CODE (exp) != COMPONENT_REF
6707 && TREE_CODE (exp) != BIT_FIELD_REF
6708 && TREE_CODE (exp) != INDIRECT_REF
6709 && TREE_CODE (exp) != CALL_EXPR
6710 && TREE_CODE (exp) != VAR_DECL
      && TREE_CODE (exp) != RTL_EXPR)
    {
      enum machine_mode mode = GET_MODE (target);
6715 if (GET_MODE_CLASS (mode) == MODE_INT
6716 && mode > MAX_INTEGER_COMPUTATION_MODE)
	internal_error ("unsupported wide integer operation");
    }

  if (tmode != mode
      && TREE_CODE (exp) != INTEGER_CST
6722 && TREE_CODE (exp) != PARM_DECL
6723 && TREE_CODE (exp) != ARRAY_REF
6724 && TREE_CODE (exp) != ARRAY_RANGE_REF
6725 && TREE_CODE (exp) != COMPONENT_REF
6726 && TREE_CODE (exp) != BIT_FIELD_REF
6727 && TREE_CODE (exp) != INDIRECT_REF
6728 && TREE_CODE (exp) != VAR_DECL
6729 && TREE_CODE (exp) != CALL_EXPR
6730 && TREE_CODE (exp) != RTL_EXPR
6731 && GET_MODE_CLASS (tmode) == MODE_INT
6732 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6733 internal_error ("unsupported wide integer operation");
  check_max_integer_computation_mode (exp);
#endif
6738 /* If will do cse, generate all results into pseudo registers
6739 since 1) that allows cse to find more things
6740 and 2) otherwise cse could produce an insn the machine
6741 cannot support. An exception is a CONSTRUCTOR into a multi-word
6742 MEM: that's much more likely to be most efficient into the MEM.
6743 Another is a CALL_EXPR which must return in memory. */
6745 if (! cse_not_expected && mode != BLKmode && target
6746 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6747 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
      && ! (code == CALL_EXPR && aggregate_value_p (exp)))
    target = 0;

  switch (code)
    {
    case LABEL_DECL:
      {
	tree function = decl_function_context (exp);
	/* Labels in containing functions, or labels used from initializers,
	   must be forced.  */
	if (modifier == EXPAND_INITIALIZER
	    || (function != current_function_decl
		&& function != inline_function_decl
		&& function != 0))
	  temp = force_label_rtx (exp);
	else
	  temp = label_rtx (exp);
6766 temp = gen_rtx_MEM (FUNCTION_MODE, gen_rtx_LABEL_REF (Pmode, temp));
6767 if (function != current_function_decl
6768 && function != inline_function_decl && function != 0)
	  LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
	return temp;
      }

    case PARM_DECL:
      if (!DECL_RTL_SET_P (exp))
	{
	  error_with_decl (exp, "prior parameter's size depends on `%s'");
	  return CONST0_RTX (mode);
	}
6780 /* ... fall through ... */
    case VAR_DECL:
      /* If a static var's type was incomplete when the decl was written,
6784 but the type is complete now, lay out the decl now. */
6785 if (DECL_SIZE (exp) == 0
6786 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6787 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6788 layout_decl (exp, 0);
6790 /* ... fall through ... */
    case FUNCTION_DECL:
    case RESULT_DECL:
      if (DECL_RTL (exp) == 0)
	abort ();
      /* Ensure variable marked as used even if it doesn't go through
	 a parser.  If it hasn't been used yet, write out an external
	 definition.  */
      if (! TREE_USED (exp))
	{
6802 assemble_external (exp);
	  TREE_USED (exp) = 1;
	}

      /* Show we haven't gotten RTL for this yet.  */
      temp = 0;
6809 /* Handle variables inherited from containing functions. */
6810 context = decl_function_context (exp);
6812 /* We treat inline_function_decl as an alias for the current function
6813 because that is the inline function whose vars, types, etc.
6814 are being merged into the current function.
6815 See expand_inline_function. */
6817 if (context != 0 && context != current_function_decl
6818 && context != inline_function_decl
6819 /* If var is static, we don't need a static chain to access it. */
6820 && ! (GET_CODE (DECL_RTL (exp)) == MEM
		&& CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
	{
	  rtx addr;

	  /* Mark as non-local and addressable.  */
6826 DECL_NONLOCAL (exp) = 1;
	  if (DECL_NO_STATIC_CHAIN (current_function_decl))
	    abort ();
	  (*lang_hooks.mark_addressable) (exp);
	  if (GET_CODE (DECL_RTL (exp)) != MEM)
	    abort ();
	  addr = XEXP (DECL_RTL (exp), 0);
	  if (GET_CODE (addr) == MEM)
	    addr
	      = replace_equiv_address (addr,
				       fix_lexical_addr (XEXP (addr, 0), exp));
	  else
	    addr = fix_lexical_addr (addr, exp);

	  temp = replace_equiv_address (DECL_RTL (exp), addr);
	}
      /* This is the case of an array whose size is to be determined
	 from its initializer, while the initializer is still being parsed.
	 See expand_decl.  */
6847 else if (GET_CODE (DECL_RTL (exp)) == MEM
6848 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6849 temp = validize_mem (DECL_RTL (exp));
6851 /* If DECL_RTL is memory, we are in the normal case and either
6852 the address is not valid or it is not a register and -fforce-addr
6853 is specified, get the address into a register. */
6855 else if (GET_CODE (DECL_RTL (exp)) == MEM
6856 && modifier != EXPAND_CONST_ADDRESS
6857 && modifier != EXPAND_SUM
6858 && modifier != EXPAND_INITIALIZER
6859 && (! memory_address_p (DECL_MODE (exp),
				      XEXP (DECL_RTL (exp), 0))
		   || (flag_force_addr
		       && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6863 temp = replace_equiv_address (DECL_RTL (exp),
6864 copy_rtx (XEXP (DECL_RTL (exp), 0)));
      /* If we got something, return it.  But first, set the alignment
	 if the address is a register.  */
      if (temp != 0)
	{
	  if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
	    mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));

	  return temp;
	}
6876 /* If the mode of DECL_RTL does not match that of the decl, it
6877 must be a promoted value. We return a SUBREG of the wanted mode,
6878 but mark it so that we know that it was already extended. */
6880 if (GET_CODE (DECL_RTL (exp)) == REG
6881 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6883 /* Get the signedness used for this variable. Ensure we get the
6884 same mode we got when the variable was declared. */
	  if (GET_MODE (DECL_RTL (exp))
	      != promote_mode (type, DECL_MODE (exp), &unsignedp,
			       (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
	    abort ();

	  temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
	  SUBREG_PROMOTED_VAR_P (temp) = 1;
	  SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
	  return temp;
	}

      return DECL_RTL (exp);
    case INTEGER_CST:
      temp = immed_double_const (TREE_INT_CST_LOW (exp),
				 TREE_INT_CST_HIGH (exp), mode);
6902 /* ??? If overflow is set, fold will have done an incomplete job,
6903 which can result in (plus xx (const_int 0)), which can get
6904 simplified by validate_replace_rtx during virtual register
6905 instantiation, which can result in unrecognizable insns.
6906 Avoid this by forcing all overflows into registers. */
6907 if (TREE_CONSTANT_OVERFLOW (exp)
6908 && modifier != EXPAND_INITIALIZER)
	temp = force_reg (mode, temp);

      return temp;

    case VECTOR_CST:
      return const_vector_from_tree (exp);
    case CONST_DECL:
      return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
    case REAL_CST:
      /* If optimized, generate immediate CONST_DOUBLE
6921 which will be turned into memory by reload if necessary.
6923 We used to force a register so that loop.c could see it. But
6924 this does not allow gen_* patterns to perform optimizations with
6925 the constants. It also produces two insns in cases like "x = 1.0;".
6926 On most machines, floating-point constants are not permitted in
6927 many insns, so we'd end up copying it to a register in any case.
6929 Now, we do the copying in expand_binop, if appropriate. */
6930 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6931 TYPE_MODE (TREE_TYPE (exp)));
6935 temp = output_constant_def (exp, 1);
6937 /* temp contains a constant address.
6938 On RISC machines where a constant address isn't valid,
6939 make some insns to get that address into a register. */
6940 if (modifier != EXPAND_CONST_ADDRESS
6941 && modifier != EXPAND_INITIALIZER
6942 && modifier != EXPAND_SUM
6943 && (! memory_address_p (mode, XEXP (temp, 0))
6944 || flag_force_addr))
6945 return replace_equiv_address (temp,
6946 copy_rtx (XEXP (temp, 0)));
    case EXPR_WITH_FILE_LOCATION:
      {
	rtx to_return;
	location_t saved_loc = input_location;
	input_filename = EXPR_WFL_FILENAME (exp);
	input_line = EXPR_WFL_LINENO (exp);
	if (EXPR_WFL_EMIT_LINE_NOTE (exp))
	  emit_line_note (input_filename, input_line);
	/* Possibly avoid switching back and forth here.  */
	to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
	input_location = saved_loc;
	return to_return;
      }
    case SAVE_EXPR:
      context = decl_function_context (exp);
6966 /* If this SAVE_EXPR was at global context, assume we are an
	 initialization function and move it into our context.  */
      if (context == 0)
	SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6971 /* We treat inline_function_decl as an alias for the current function
6972 because that is the inline function whose vars, types, etc.
6973 are being merged into the current function.
6974 See expand_inline_function. */
      if (context == current_function_decl || context == inline_function_decl)
	context = 0;
      /* If this is non-local, handle it.  */
      if (context)
	{
	  /* The following call just exists to abort if the context is
	     not of a containing function.  */
	  find_function_data (context);
	  temp = SAVE_EXPR_RTL (exp);
	  if (temp && GET_CODE (temp) == REG)
	    {
	      put_var_into_stack (exp, /*rescan=*/true);
	      temp = SAVE_EXPR_RTL (exp);
	    }
	  if (temp == 0 || GET_CODE (temp) != MEM)
	    abort ();
	  return
	    replace_equiv_address (temp,
				   fix_lexical_addr (XEXP (temp, 0), exp));
	}
      if (SAVE_EXPR_RTL (exp) == 0)
	{
	  if (mode == VOIDmode)
	    temp = const0_rtx;
	  else
	    temp = assign_temp (build_qualified_type (type,
						      (TYPE_QUALS (type)
						       | TYPE_QUAL_CONST)),
				3, 0, 0);
7007 SAVE_EXPR_RTL (exp) = temp;
7008 if (!optimize && GET_CODE (temp) == REG)
	    save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
						save_expr_regs);
7012 /* If the mode of TEMP does not match that of the expression, it
7013 must be a promoted value. We pass store_expr a SUBREG of the
	     wanted mode but mark it so that we know that it was already
	     extended.  */
7017 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
7019 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
7020 promote_mode (type, mode, &unsignedp, 0);
7021 SUBREG_PROMOTED_VAR_P (temp) = 1;
	      SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
	    }

	  if (temp == const0_rtx)
	    expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
	  else
	    store_expr (TREE_OPERAND (exp, 0), temp,
			modifier == EXPAND_STACK_PARM ? 2 : 0);
	  TREE_USED (exp) = 1;
	}
7034 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
7035 must be a promoted value. We return a SUBREG of the wanted mode,
7036 but mark it so that we know that it was already extended. */
7038 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
7039 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
7041 /* Compute the signedness and make the proper SUBREG. */
7042 promote_mode (type, mode, &unsignedp, 0);
7043 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
7044 SUBREG_PROMOTED_VAR_P (temp) = 1;
7045 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
	  return temp;
	}

      return SAVE_EXPR_RTL (exp);
    case UNSAVE_EXPR:
      {
	temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
	TREE_OPERAND (exp, 0)
	  = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
	return temp;
      }
7060 case PLACEHOLDER_EXPR:
7062 tree old_list = placeholder_list;
7063 tree placeholder_expr = 0;
	exp = find_placeholder (exp, &placeholder_expr);
	if (exp == 0)
	  abort ();

	placeholder_list = TREE_CHAIN (placeholder_expr);
	temp = expand_expr (exp, original_target, tmode, modifier);
	placeholder_list = old_list;
	return temp;
      }
7075 case WITH_RECORD_EXPR:
7076 /* Put the object on the placeholder list, expand our first operand,
7077 and pop the list. */
      placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
				    placeholder_list);
      target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
			    modifier);
      placeholder_list = TREE_CHAIN (placeholder_list);
      return target;
    case GOTO_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
	expand_goto (TREE_OPERAND (exp, 0));
      else
	expand_computed_goto (TREE_OPERAND (exp, 0));
      return const0_rtx;
    case EXIT_EXPR:
      expand_exit_loop_if_false (NULL,
				 invert_truthvalue (TREE_OPERAND (exp, 0)));
      return const0_rtx;
7097 case LABELED_BLOCK_EXPR:
7098 if (LABELED_BLOCK_BODY (exp))
7099 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
7100 /* Should perhaps use expand_label, but this is simpler and safer. */
7101 do_pending_stack_adjust ();
      emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
      return const0_rtx;
7105 case EXIT_BLOCK_EXPR:
7106 if (EXIT_BLOCK_RETURN (exp))
7107 sorry ("returned value in block_exit_expr");
      expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
      return const0_rtx;
    case LOOP_EXPR:
      push_temp_slots ();
      expand_start_loop (1);
      expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
      expand_end_loop ();
      pop_temp_slots ();

      return const0_rtx;

    case BIND_EXPR:
      {
	tree vars = TREE_OPERAND (exp, 0);
7124 /* Need to open a binding contour here because
7125 if there are any cleanups they must be contained here. */
7126 expand_start_bindings (2);
7128 /* Mark the corresponding BLOCK for output in its proper place. */
7129 if (TREE_OPERAND (exp, 2) != 0
7130 && ! TREE_USED (TREE_OPERAND (exp, 2)))
7131 (*lang_hooks.decls.insert_block) (TREE_OPERAND (exp, 2));
	/* If VARS have not yet been expanded, expand them now.  */
	while (vars)
	  {
	    if (!DECL_RTL_SET_P (vars))
	      expand_decl (vars);
	    expand_decl_init (vars);
	    vars = TREE_CHAIN (vars);
	  }
7142 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
	expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);

	return temp;
      }

    case RTL_EXPR:
      if (RTL_EXPR_SEQUENCE (exp))
	{
	  if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
	    abort ();
	  emit_insn (RTL_EXPR_SEQUENCE (exp));
	  RTL_EXPR_SEQUENCE (exp) = const0_rtx;
	}
      preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
7158 free_temps_for_rtl_expr (exp);
7159 return RTL_EXPR_RTL (exp);
    case CONSTRUCTOR:
      /* If we don't need the result, just ensure we evaluate any
	 subexpressions.  */
      if (ignore)
	{
	  tree elt;

	  for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
	    expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);

	  return const0_rtx;
	}
7174 /* All elts simple constants => refer to a constant in memory. But
7175 if this is a non-BLKmode mode, let it store a field at a time
7176 since that should make a CONST_INT or CONST_DOUBLE when we
7177 fold. Likewise, if we have a target we can use, it is best to
7178 store directly into the target unless the type is large enough
7179 that memcpy will be used. If we are making an initializer and
7180 all operands are constant, put it in memory as well.
7182 FIXME: Avoid trying to fill vector constructors piece-meal.
7183 Output them with output_constant_def below unless we're sure
7184 they're zeros. This should go away when vector initializers
7185 are treated like VECTOR_CST instead of arrays.
7187 else if ((TREE_STATIC (exp)
7188 && ((mode == BLKmode
7189 && ! (target != 0 && safe_from_p (target, exp, 1)))
7190 || TREE_ADDRESSABLE (exp)
7191 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7192 && (! MOVE_BY_PIECES_P
7193 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7195 && ((TREE_CODE (type) == VECTOR_TYPE
7196 && !is_zeros_p (exp))
7197 || ! mostly_zeros_p (exp)))))
7198 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
7200 rtx constructor = output_constant_def (exp, 1);
7202 if (modifier != EXPAND_CONST_ADDRESS
7203 && modifier != EXPAND_INITIALIZER
7204 && modifier != EXPAND_SUM)
	    constructor = validize_mem (constructor);

	  return constructor;
	}
      else
	{
	  /* Handle calls that pass values in multiple non-contiguous
	     locations.  The Irix 6 ABI has examples of this.  */
	  if (target == 0 || ! safe_from_p (target, exp, 1)
	      || GET_CODE (target) == PARALLEL
	      || modifier == EXPAND_STACK_PARM)
	    target
	      = assign_temp (build_qualified_type (type,
						   (TYPE_QUALS (type)
						    | (TREE_READONLY (exp)
						       * TYPE_QUAL_CONST))),
			     0, TREE_ADDRESSABLE (exp), 1);
	  store_constructor (exp, target, 0, int_expr_size (exp));
	  return target;
	}
    case INDIRECT_REF:
      {
	tree exp1 = TREE_OPERAND (exp, 0);
	tree index;
	tree string = string_constant (exp1, &index);
	/* Try to optimize reads from const strings.  */
	if (string
	    && TREE_CODE (string) == STRING_CST
7236 && TREE_CODE (index) == INTEGER_CST
7237 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
7238 && GET_MODE_CLASS (mode) == MODE_INT
7239 && GET_MODE_SIZE (mode) == 1
7240 && modifier != EXPAND_WRITE)
7241 return gen_int_mode (TREE_STRING_POINTER (string)
7242 [TREE_INT_CST_LOW (index)], mode);
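	/* Example of the optimization above: a read such as "abc"[1],
	   reaching here as *("abc" + 1), folds directly to the CONST_INT
	   98 ('b'), so no memory load is emitted.  */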
7244 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
7245 op0 = memory_address (mode, op0);
7246 temp = gen_rtx_MEM (mode, op0);
7247 set_mem_attributes (temp, exp, 0);
7249 /* If we are writing to this object and its type is a record with
7250 readonly fields, we must mark it as readonly so it will
7251 conflict with readonly references to those fields. */
	if (modifier == EXPAND_WRITE && readonly_fields_p (type))
	  RTX_UNCHANGING_P (temp) = 1;

	return temp;
      }

    case ARRAY_REF:
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
	abort ();
      {
	tree array = TREE_OPERAND (exp, 0);
7264 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
7265 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
7266 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
7269 /* Optimize the special-case of a zero lower bound.
7271 We convert the low_bound to sizetype to avoid some problems
7272 with constant folding. (E.g. suppose the lower bound is 1,
7273 and its mode is QI. Without the conversion, (ARRAY
7274 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
7275 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
7277 if (! integer_zerop (low_bound))
7278 index = size_diffop (index, convert (sizetype, low_bound));
7280 /* Fold an expression like: "foo"[2].
7281 This is not done in fold so it won't happen inside &.
7282 Don't fold if this is for wide characters since it's too
7283 difficult to do correctly and this is a very rare case. */
7285 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
7286 && TREE_CODE (array) == STRING_CST
7287 && TREE_CODE (index) == INTEGER_CST
7288 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
7289 && GET_MODE_CLASS (mode) == MODE_INT
7290 && GET_MODE_SIZE (mode) == 1)
7291 return gen_int_mode (TREE_STRING_POINTER (array)
7292 [TREE_INT_CST_LOW (index)], mode);
7294 /* If this is a constant index into a constant array,
7295 just get the value from the array. Handle both the cases when
7296 we have an explicit constructor and when our operand is a variable
7297 that was declared const. */
7299 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
7300 && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
7301 && TREE_CODE (index) == INTEGER_CST
7302 && 0 > compare_tree_int (index,
7303 list_length (CONSTRUCTOR_ELTS
7304 (TREE_OPERAND (exp, 0)))))
	    {
	      tree elem;

	      for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
		     i = TREE_INT_CST_LOW (index);
		   elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
		;

	      if (elem)
		return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
				    modifier);
	    }
7318 else if (optimize >= 1
7319 && modifier != EXPAND_CONST_ADDRESS
7320 && modifier != EXPAND_INITIALIZER
7321 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7322 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7323 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
	      if (TREE_CODE (index) == INTEGER_CST)
		{
		  tree init = DECL_INITIAL (array);
		  if (TREE_CODE (init) == CONSTRUCTOR)
		    {
		      tree elem;

		      for (elem = CONSTRUCTOR_ELTS (init);
			   (elem
			    && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
			   elem = TREE_CHAIN (elem))
			;

		      if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
			return expand_expr (fold (TREE_VALUE (elem)), target,
					    tmode, modifier);
		    }
7343 else if (TREE_CODE (init) == STRING_CST
7344 && 0 > compare_tree_int (index,
					   TREE_STRING_LENGTH (init)))
		    {
		      tree type = TREE_TYPE (TREE_TYPE (init));
7348 enum machine_mode mode = TYPE_MODE (type);
7350 if (GET_MODE_CLASS (mode) == MODE_INT
7351 && GET_MODE_SIZE (mode) == 1)
7352 return gen_int_mode (TREE_STRING_POINTER (init)
7353 [TREE_INT_CST_LOW (index)], mode);
7358 goto normal_inner_ref;
    case COMPONENT_REF:
      /* If the operand is a CONSTRUCTOR, we can just extract the
	 appropriate field if it is present.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
	{
	  tree elt;

	  for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
7368 elt = TREE_CHAIN (elt))
7369 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
7370 /* We can normally use the value of the field in the
7371 CONSTRUCTOR. However, if this is a bitfield in
7372 an integral mode that we can fit in a HOST_WIDE_INT,
7373 we must mask only the number of bits in the bitfield,
7374 since this is done implicitly by the constructor. If
7375 the bitfield does not meet either of those conditions,
7376 we can't do this optimization. */
7377 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
7378 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
7380 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
7381 <= HOST_BITS_PER_WIDE_INT))))
	      {
		if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
		    && modifier == EXPAND_STACK_PARM)
		  target = 0;
		op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
		if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
		  {
		    HOST_WIDE_INT bitsize
		      = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
		    enum machine_mode imode
		      = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));

		    if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
		      {
			op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
			op0 = expand_and (imode, op0, op1, target);
		      }
		    else
		      {
			tree count
			  = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
					 0);

			op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
					    target, 0);
			op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
					    target, 0);
		      }
		  }

		return op0;
	      }
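	/* Worked example for the bitfield code above (hypothetical field):
	   for a signed 3-bit bitfield with IMODE of SImode, the pair of
	   shifts moves the field up by 32 - 3 = 29 bits and arithmetic-
	   shifts it back down, sign-extending it; the unsigned branch
	   instead masks with (1 << 3) - 1 = 7.  */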
7415 goto normal_inner_ref;
    case ARRAY_RANGE_REF:
    normal_inner_ref:
      {
	enum machine_mode mode1;
	HOST_WIDE_INT bitsize, bitpos;
	tree offset;
	int volatilep = 0;
	tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
					&mode1, &unsignedp, &volatilep);
	rtx orig_op0;
7429 /* If we got back the original object, something is wrong. Perhaps
7430 we are evaluating an expression too early. In any event, don't
	   infinitely recurse.  */
	if (tem == exp)
	  abort ();
	/* If TEM's type is a union of variable size, pass TARGET to the inner
	   computation, since it will need a temporary and TARGET is known
	   to be safe to use.  This occurs in unchecked conversion in Ada.  */

	orig_op0 = op0
	  = expand_expr (tem,
			 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
			  && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
			      != INTEGER_CST)
			  && modifier != EXPAND_STACK_PARM
			  ? target : NULL_RTX),
			 VOIDmode,
			 (modifier == EXPAND_INITIALIZER
			  || modifier == EXPAND_CONST_ADDRESS
			  || modifier == EXPAND_STACK_PARM)
			 ? modifier : EXPAND_NORMAL);
	/* If this is a constant, put it into a register if it is a
	   legitimate constant and OFFSET is 0; otherwise force it
	   into memory.  */
	if (CONSTANT_P (op0))
	  {
	    enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
	    if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
		&& offset == 0)
	      op0 = force_reg (mode, op0);
	    else
	      op0 = validize_mem (force_const_mem (mode, op0));
	  }
	if (offset != 0)
	  {
	    rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
					  EXPAND_SUM);
7469 /* If this object is in a register, put it into memory.
7470 This case can't occur in C, but can in Ada if we have
7471 unchecked conversion of an expression from a scalar type to
7472 an array or record type. */
7473 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
		|| GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
	      {
7476 /* If the operand is a SAVE_EXPR, we can deal with this by
7477 forcing the SAVE_EXPR into memory. */
		if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
		  {
		    put_var_into_stack (TREE_OPERAND (exp, 0),
					/*rescan=*/true);
		    op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
		  }
		else
		  {
		    tree nt
		      = build_qualified_type (TREE_TYPE (tem),
7488 (TYPE_QUALS (TREE_TYPE (tem))
7489 | TYPE_QUAL_CONST));
7490 rtx memloc = assign_temp (nt, 1, 1, 1);
		    emit_move_insn (memloc, op0);
		    op0 = memloc;
		  }
	      }

	    if (GET_CODE (op0) != MEM)
	      abort ();
#ifdef POINTERS_EXTEND_UNSIGNED
	    if (GET_MODE (offset_rtx) != Pmode)
	      offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
#else
	    if (GET_MODE (offset_rtx) != ptr_mode)
	      offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif
7508 /* A constant address in OP0 can have VOIDmode, we must not try
7509 to call force_reg for that case. Avoid that case. */
7510 if (GET_CODE (op0) == MEM
7511 && GET_MODE (op0) == BLKmode
7512 && GET_MODE (XEXP (op0, 0)) != VOIDmode
		&& bitsize != 0
		&& (bitpos % bitsize) == 0
		&& (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
		&& MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
	      {
		op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
		bitpos = 0;
	      }
	    op0 = offset_address (op0, offset_rtx,
				  highest_pow2_factor (offset));
	  }
7526 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7527 record its alignment as BIGGEST_ALIGNMENT. */
7528 if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
7529 && is_aligning_offset (offset, tem))
7530 set_mem_align (op0, BIGGEST_ALIGNMENT);
7532 /* Don't forget about volatility even if this is a bitfield. */
	if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
	  {
	    if (op0 == orig_op0)
	      op0 = copy_rtx (op0);

	    MEM_VOLATILE_P (op0) = 1;
	  }
7541 /* The following code doesn't handle CONCAT.
7542 Assume only bitpos == 0 can be used for CONCAT, due to
7543 one element arrays having the same mode as its element. */
	if (GET_CODE (op0) == CONCAT)
	  {
	    if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
	      abort ();
	    return op0;
	  }
7551 /* In cases where an aligned union has an unaligned object
7552 as a field, we might be extracting a BLKmode value from
7553 an integer-mode (e.g., SImode) object. Handle this case
7554 by doing the extract into an object as wide as the field
7555 (which we know to be the width of a basic mode), then
7556 storing into memory, and changing the mode to BLKmode. */
7557 if (mode1 == VOIDmode
7558 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7559 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7560 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7561 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7562 && modifier != EXPAND_CONST_ADDRESS
7563 && modifier != EXPAND_INITIALIZER)
7564 /* If the field isn't aligned enough to fetch as a memref,
7565 fetch it as a bit field. */
7566 || (mode1 != BLKmode
7567 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7568 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))
7569 && SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0)))
7570 || (bitpos % BITS_PER_UNIT != 0)))
7571 /* If the type and the field are a constant size and the
7572 size of the type isn't the same size as the bitfield,
7573 we must use bitfield operations. */
	    || (mode == BLKmode
		&& (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
		    == INTEGER_CST)
		&& 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
					  bitsize)))
	  {
	    enum machine_mode ext_mode = mode;
7582 if (ext_mode == BLKmode
7583 && ! (target != 0 && GET_CODE (op0) == MEM
7584 && GET_CODE (target) == MEM
7585 && bitpos % BITS_PER_UNIT == 0))
7586 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
	    if (ext_mode == BLKmode)
	      {
7590 /* In this case, BITPOS must start at a byte boundary and
7591 TARGET, if specified, must be a MEM. */
7592 if (GET_CODE (op0) != MEM
		if (GET_CODE (op0) != MEM
		    || (target != 0 && GET_CODE (target) != MEM)
		    || bitpos % BITS_PER_UNIT != 0)
		  abort ();

		op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
		if (target == 0)
		  target = assign_temp (type, 0, 1, 1);
		emit_block_move (target, op0,
				 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
					  / BITS_PER_UNIT),
				 (modifier == EXPAND_STACK_PARM
				  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));

		return target;
	      }

	    op0 = validize_mem (op0);
7612 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7613 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
	    op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
				     (modifier == EXPAND_STACK_PARM
				      ? NULL_RTX : target),
				     ext_mode, ext_mode,
				     int_size_in_bytes (TREE_TYPE (tem)));
7621 /* If the result is a record type and BITSIZE is narrower than
7622 the mode of OP0, an integral mode, and this is a big endian
7623 machine, we must put the field into the high-order bits. */
7624 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7625 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7626 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
	      op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
				  size_int (GET_MODE_BITSIZE (GET_MODE (op0))
					    - bitsize),
				  op0, 1);

	    if (mode == BLKmode)
	      {
		rtx new = assign_temp (build_qualified_type
				       ((*lang_hooks.types.type_for_mode)
					(ext_mode, 0),
					TYPE_QUAL_CONST), 0, 1, 1);
7639 emit_move_insn (new, op0);
7640 op0 = copy_rtx (new);
7641 PUT_MODE (op0, BLKmode);
		set_mem_attributes (op0, exp, 1);
	      }

	    return op0;
	  }
	/* If the result is BLKmode, use that to access the object
	   now as well.  */
	if (mode == BLKmode)
	  mode1 = BLKmode;
7653 /* Get a reference to just this component. */
7654 if (modifier == EXPAND_CONST_ADDRESS
7655 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	  op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
	else
	  op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7660 if (op0 == orig_op0)
7661 op0 = copy_rtx (op0);
7663 set_mem_attributes (op0, exp, 0);
7664 if (GET_CODE (XEXP (op0, 0)) == REG)
7665 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7667 MEM_VOLATILE_P (op0) |= volatilep;
7668 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7669 || modifier == EXPAND_CONST_ADDRESS
	    || modifier == EXPAND_INITIALIZER)
	  return op0;
	else if (target == 0)
	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

	convert_move (target, op0, unsignedp);
	return target;
      }
    case VTABLE_REF:
      {
	rtx insn, before = get_last_insn (), vtbl_ref;
7683 /* Evaluate the interior expression. */
	subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
				 tmode, modifier);

	/* Get or create an instruction off which to hang a note.  */
	if (REG_P (subtarget))
	  {
	    target = subtarget;
	    insn = get_last_insn ();
	    if (insn == before)
	      abort ();
	    if (! INSN_P (insn))
	      insn = prev_nonnote_insn (insn);
	  }
	else
	  {
	    target = gen_reg_rtx (GET_MODE (subtarget));
	    insn = emit_move_insn (target, subtarget);
	  }
7703 /* Collect the data for the note. */
7704 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7705 vtbl_ref = plus_constant (vtbl_ref,
7706 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7707 /* Discard the initial CONST that was added. */
7708 vtbl_ref = XEXP (vtbl_ref, 0);
	REG_NOTES (insn)
	  = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));

	return target;
      }
    case BUFFER_REF:
      /* Intended for a reference to a buffer of a file-object in Pascal.
	 But it's not certain that a special tree code will really be
	 necessary for these.  INDIRECT_REF might work for them.  */
      abort ();

    case IN_EXPR:
      {
	/* Pascal set IN expression.

	   Algorithm:
	       rlo       = set_low - (set_low%bits_per_word);
	       the_word  = set [ (index - rlo)/bits_per_word ];
	       bit_index = index % bits_per_word;
	       bitmask   = 1 << bit_index;
	       return !!(the_word & bitmask);  */
7733 tree set = TREE_OPERAND (exp, 0);
7734 tree index = TREE_OPERAND (exp, 1);
7735 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7736 tree set_type = TREE_TYPE (set);
7737 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7738 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7739 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7740 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7741 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7742 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7743 rtx setaddr = XEXP (setval, 0);
	enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
	rtx rlow;
	rtx diff, quo, rem, addr, bit, result;
7748 /* If domain is empty, answer is no. Likewise if index is constant
7749 and out of bounds. */
7750 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7751 && TREE_CODE (set_low_bound) == INTEGER_CST
7752 && tree_int_cst_lt (set_high_bound, set_low_bound))
7753 || (TREE_CODE (index) == INTEGER_CST
7754 && TREE_CODE (set_low_bound) == INTEGER_CST
7755 && tree_int_cst_lt (index, set_low_bound))
7756 || (TREE_CODE (set_high_bound) == INTEGER_CST
7757 && TREE_CODE (index) == INTEGER_CST
	      && tree_int_cst_lt (set_high_bound, index))))
	  return const0_rtx;

	if (target == 0)
	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7764 /* If we get here, we have to generate the code for both cases
7765 (in range and out of range). */
7767 op0 = gen_label_rtx ();
7768 op1 = gen_label_rtx ();
7770 if (! (GET_CODE (index_val) == CONST_INT
7771 && GET_CODE (lo_r) == CONST_INT))
7772 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7773 GET_MODE (index_val), iunsignedp, op1);
7775 if (! (GET_CODE (index_val) == CONST_INT
7776 && GET_CODE (hi_r) == CONST_INT))
7777 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7778 GET_MODE (index_val), iunsignedp, op1);
	/* Calculate the element number of bit zero in the first word
	   of the set.  */
	if (GET_CODE (lo_r) == CONST_INT)
7783 rlow = GEN_INT (INTVAL (lo_r)
			  & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
	else
	  rlow = expand_binop (index_mode, and_optab, lo_r,
7787 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7788 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7790 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7791 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7793 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7794 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7795 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7796 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7798 addr = memory_address (byte_mode,
7799 expand_binop (index_mode, add_optab, diff,
				      setaddr, NULL_RTX, iunsignedp,
				      OPTAB_LIB_WIDEN));

	/* Extract the bit we want to examine.  */
7804 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7805 gen_rtx_MEM (byte_mode, addr),
			    make_tree (TREE_TYPE (index), rem),
			    NULL_RTX, 1);
7808 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7809 GET_MODE (target) == byte_mode ? target : 0,
7810 1, OPTAB_LIB_WIDEN);
7812 if (result != target)
7813 convert_move (target, result, 1);
	/* Output the code to handle the out-of-range case.  */
	emit_jump (op0);
	emit_label (op1);
	emit_move_insn (target, const0_rtx);
	emit_label (op0);
	return target;
      }
7823 case WITH_CLEANUP_EXPR:
      if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
	{
	  WITH_CLEANUP_EXPR_RTL (exp)
	    = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
	  expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
				  CLEANUP_EH_ONLY (exp));

	  /* That's it for this cleanup.  */
	  TREE_OPERAND (exp, 1) = 0;
	}
      return WITH_CLEANUP_EXPR_RTL (exp);
    case CLEANUP_POINT_EXPR:
      {
	/* Start a new binding layer that will keep track of all cleanup
	   actions to be performed.  */
	expand_start_bindings (2);

	target_temp_slot_level = temp_slot_level;

	op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
	/* If we're going to use this value, load it up now.  */
	if (! ignore)
	  op0 = force_not_mem (op0);
	preserve_temp_slots (op0);
	expand_end_bindings (NULL_TREE, 0, 0);
      }
      return op0;
    case CALL_EXPR:
      /* Check for a built-in function.  */
7855 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
	      == FUNCTION_DECL)
	  && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	{
7860 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7861 == BUILT_IN_FRONTEND)
	    return (*lang_hooks.expand_expr) (exp, original_target,
					      tmode, modifier);

	  return expand_builtin (exp, target, subtarget, tmode, ignore);
	}

      return expand_call (exp, target, ignore);
    case NON_LVALUE_EXPR:
    case NOP_EXPR:
    case CONVERT_EXPR:
    case REFERENCE_EXPR:
      if (TREE_OPERAND (exp, 0) == error_mark_node)
	return const0_rtx;

      if (TREE_CODE (type) == UNION_TYPE)
	{
	  tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7881 /* If both input and output are BLKmode, this conversion isn't doing
7882 anything except possibly changing memory attribute. */
	  if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
	    {
	      rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
					modifier);

	      result = copy_rtx (result);
	      set_mem_attributes (result, exp, 0);
	      return result;
	    }
	  if (target == 0)
	    target = assign_temp (type, 0, 1, 1);

	  if (GET_CODE (target) == MEM)
7897 /* Store data into beginning of memory target. */
7898 store_expr (TREE_OPERAND (exp, 0),
7899 adjust_address (target, TYPE_MODE (valtype), 0),
7900 modifier == EXPAND_STACK_PARM ? 2 : 0);
7902 else if (GET_CODE (target) == REG)
7903 /* Store this field into a union of the proper type. */
7904 store_field (target,
7905 MIN ((int_size_in_bytes (TREE_TYPE
					  (TREE_OPERAND (exp, 0)))
			       * BITS_PER_UNIT),
			      (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
			 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
			 VOIDmode, 0, type, 0);
	  else
	    abort ();

	  /* Return the entire union.  */
	  return target;
	}

      if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	{
	  op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
			     modifier);
7923 /* If the signedness of the conversion differs and OP0 is
7924 a promoted SUBREG, clear that indication since we now
7925 have to do the proper extension. */
7926 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7927 && GET_CODE (op0) == SUBREG)
	    SUBREG_PROMOTED_VAR_P (op0) = 0;

	  return op0;
	}
7933 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
      if (GET_MODE (op0) == mode)
	return op0;
7937 /* If OP0 is a constant, just convert it into the proper mode. */
7938 if (CONSTANT_P (op0))
7940 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7941 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7943 if (modifier == EXPAND_INITIALIZER)
	    return simplify_gen_subreg (mode, op0, inner_mode,
					subreg_lowpart_offset (mode,
							       inner_mode));
	  else
	    return convert_modes (mode, inner_mode, op0,
				  TREE_UNSIGNED (inner_type));
	}
7952 if (modifier == EXPAND_INITIALIZER)
7953 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
      if (target == 0)
	return
	  convert_to_mode (mode, op0,
			   TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      else
	convert_move (target, op0,
		      TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;
7964 case VIEW_CONVERT_EXPR:
7965 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7967 /* If the input and output modes are both the same, we are done.
7968 Otherwise, if neither mode is BLKmode and both are integral and within
7969 a word, we can use gen_lowpart. If neither is true, make sure the
7970 operand is in memory and convert the MEM to the new mode. */
      if (TYPE_MODE (type) == GET_MODE (op0))
	;
7973 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7974 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7975 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
7976 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7977 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7978 op0 = gen_lowpart (TYPE_MODE (type), op0);
      else if (GET_CODE (op0) != MEM)
	{
	  /* If the operand is not a MEM, force it into memory.  Since we
	     are going to be changing the mode of the MEM, don't call
	     force_const_mem for constants because we don't allow pool
	     constants to change mode.  */
7985 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
	  if (TREE_ADDRESSABLE (exp))
	    abort ();

	  if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
	    target
	      = assign_stack_temp_for_type
7993 (TYPE_MODE (inner_type),
7994 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
	  emit_move_insn (target, op0);
	  op0 = target;
	}
8000 /* At this point, OP0 is in the correct mode. If the output type is such
8001 that the operand is known to be aligned, indicate that it is.
	 Otherwise, we need only be concerned about alignment for non-BLKmode
	 results.  */
      if (GET_CODE (op0) == MEM)
	{
	  op0 = copy_rtx (op0);
8008 if (TYPE_ALIGN_OK (type))
8009 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
8010 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
	       && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
	    {
	      tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8014 HOST_WIDE_INT temp_size
8015 = MAX (int_size_in_bytes (inner_type),
8016 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
8017 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
8018 temp_size, 0, type);
8019 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
	      if (TREE_ADDRESSABLE (exp))
		abort ();

	      if (GET_MODE (op0) == BLKmode)
8025 emit_block_move (new_with_op0_mode, op0,
8026 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
8027 (modifier == EXPAND_STACK_PARM
8028 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
	      else
		emit_move_insn (new_with_op0_mode, op0);

	      op0 = new;
	    }

	  op0 = adjust_address (op0, TYPE_MODE (type), 0);
	}

      return op0;

    case PLUS_EXPR:
      this_optab = ! unsignedp && flag_trapv
		   && (GET_MODE_CLASS (mode) == MODE_INT)
		   ? addv_optab : add_optab;
8045 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
8046 something else, make sure we add the register to the constant and
8047 then to the other thing. This case can occur during strength
8048 reduction and doing it this way will produce better code if the
8049 frame pointer or argument pointer is eliminated.
8051 fold-const.c will ensure that the constant is always in the inner
8052 PLUS_EXPR, so the only case we need to do anything about is if
8053 sp, ap, or fp is our second argument, in which case we must swap
8054 the innermost first argument and our second argument. */
8056 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
8057 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
8058 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
8059 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
8060 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
	  || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
	{
	  tree t = TREE_OPERAND (exp, 1);

	  TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
	  TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
	}
8069 /* If the result is to be ptr_mode and we are adding an integer to
8070 something, we might be forming a constant. So try to use
8071 plus_constant. If it produces a sum and we can't accept it,
8072 use force_operand. This allows P = &ARR[const] to generate
	 efficient code on machines where a SYMBOL_REF is not a valid
	 address.

	 If this is an EXPAND_SUM call, always return the sum.  */
8077 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8078 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
	  if (modifier == EXPAND_STACK_PARM)
	    target = 0;
8082 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
8083 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	      && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
	    {
	      rtx constant_part;

	      op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
				 EXPAND_SUM);
8090 /* Use immed_double_const to ensure that the constant is
8091 truncated according to the mode of OP1, then sign extended
8092 to a HOST_WIDE_INT. Using the constant directly can result
		 in non-canonical RTL in a 64x32 cross compile.  */
	      constant_part
		= immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
				      (HOST_WIDE_INT) 0,
				      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
	      op1 = plus_constant (op1, INTVAL (constant_part));
	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		op1 = force_operand (op1, target);
	      return op1;
	    }
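	  /* Example: expanding &arr[10] for 4-byte elements reduces here
	     to plus_constant (op1, 40), i.e. (plus (symbol_ref arr)
	     (const_int 40)), with no add instruction emitted under
	     EXPAND_SUM.  */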
	  else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
		   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
		   && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
	    {
	      rtx constant_part;

	      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8111 (modifier == EXPAND_INITIALIZER
8112 ? EXPAND_INITIALIZER : EXPAND_SUM));
	      if (! CONSTANT_P (op0))
		{
8115 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8116 VOIDmode, modifier);
8117 /* Don't go to both_summands if modifier
8118 says it's not right to return a PLUS. */
		  if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		    goto binop2;
		  goto both_summands;
		}
8123 /* Use immed_double_const to ensure that the constant is
8124 truncated according to the mode of OP1, then sign extended
8125 to a HOST_WIDE_INT. Using the constant directly can result
		 in non-canonical RTL in a 64x32 cross compile.  */
	      constant_part
		= immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
				      (HOST_WIDE_INT) 0,
				      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
	      op0 = plus_constant (op0, INTVAL (constant_part));
8132 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		op0 = force_operand (op0, target);
	      return op0;
	    }
	}
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;
8141 /* No sense saving up arithmetic to be done
8142 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
8145 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
	  || mode != ptr_mode)
	{
	  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
	  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
	  if (op0 == const0_rtx)
	    return op1;
	  if (op1 == const0_rtx)
	    return op0;
	  goto binop2;
	}
8157 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
8158 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
      /* We come here from MINUS_EXPR when the second operand is a
	 constant.  */
    both_summands:
      /* Make sure any term that's a sum with a constant comes last.  */
      if (GET_CODE (op0) == PLUS
	  && CONSTANT_P (XEXP (op0, 1)))
	{
	  temp = op0;
	  op0 = op1;
	  op1 = temp;
	}
8171 /* If adding to a sum including a constant,
8172 associate it to put the constant outside. */
      if (GET_CODE (op1) == PLUS
	  && CONSTANT_P (XEXP (op1, 1)))
	{
	  rtx constant_term = const0_rtx;

	  temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
	  if (temp != 0)
	    op0 = temp;
	  /* Ensure that MULT comes first if there is one.  */
	  else if (GET_CODE (op0) == MULT)
	    op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
	  else
	    op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
8187 /* Let's also eliminate constants from op0 if possible. */
8188 op0 = eliminate_constant_term (op0, &constant_term);
8190 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
8191 their sum should be a constant. Form it into OP1, since the
8192 result we want will then be OP0 + OP1. */
	  temp = simplify_binary_operation (PLUS, mode, constant_term,
					    XEXP (op1, 1));
	  if (temp != 0)
	    op1 = temp;
	  else
	    op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
	}
8202 /* Put a constant term last and put a multiplication first. */
8203 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
8204 temp = op1, op1 = op0, op0 = temp;
8206 temp = simplify_binary_operation (PLUS, mode, op0, op1);
8207 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
    case MINUS_EXPR:
      /* For initializers, we are allowed to return a MINUS of two
	 symbolic constants.  Here we handle all cases when both operands
	 are constant.  */
8213 /* Handle difference of two symbolic constants,
8214 for the sake of an initializer. */
8215 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8216 && really_constant_p (TREE_OPERAND (exp, 0))
	  && really_constant_p (TREE_OPERAND (exp, 1)))
	{
	  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
				 modifier);
	  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
				 modifier);
8224 /* If the last operand is a CONST_INT, use plus_constant of
8225 the negated constant. Else make the MINUS. */
	  if (GET_CODE (op1) == CONST_INT)
	    return plus_constant (op0, - INTVAL (op1));
	  else
	    return gen_rtx_MINUS (mode, op0, op1);
	}
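      /* Example: a file-scope initializer such as
	     static long d = (char *) &a - (char *) &b;
	 arrives here with two symbolic operands and is emitted as
	 (minus (symbol_ref a) (symbol_ref b)) for the assembler or linker
	 to resolve, rather than as run-time arithmetic.  */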
8232 this_optab = ! unsignedp && flag_trapv
8233 && (GET_MODE_CLASS(mode) == MODE_INT)
8234 ? subv_optab : sub_optab;
8236 /* No sense saving up arithmetic to be done
8237 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
	  || mode != ptr_mode)
	goto binop;

      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;
8247 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
8248 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
8250 /* Convert A - const to A + (-const). */
8251 if (GET_CODE (op1) == CONST_INT)
8253 op1 = negate_rtx (mode, op1);
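/* For illustration: "i - 5" is rewritten here so that it reaches the
   PLUS handling above as "i + (-5)", roughly
   (plus:SI (reg:SI i) (const_int -5))
   letting the constant-association logic above apply unchanged. */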
8260 /* If first operand is constant, swap them.
8261 Thus the following special case checks need only
8262 check the second operand. */
8263 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
8265 tree t1 = TREE_OPERAND (exp, 0);
8266 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
8267 TREE_OPERAND (exp, 1) = t1;
8270 /* Attempt to return something suitable for generating an
8271 indexed address, for machines that support that. */
8273 if (modifier == EXPAND_SUM && mode == ptr_mode
8274 && host_integerp (TREE_OPERAND (exp, 1), 0))
8276 tree exp1 = TREE_OPERAND (exp, 1);
8278 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8281 /* If we knew for certain that this is arithmetic for an array
8282 reference, and we knew the bounds of the array, then we could
8283 apply the distributive law across (PLUS X C) for constant C.
8284 Without such knowledge, we risk overflowing the computation
8285 when both X and C are large, but X+C isn't. */
8286 /* ??? Could perhaps special-case EXP being unsigned and C being
8287 positive. In that case we are certain that X+C is no smaller
8288 than X and so the transformed expression will overflow iff the
8289 original would have. */
8291 if (GET_CODE (op0) != REG)
8292 op0 = force_operand (op0, NULL_RTX);
8293 if (GET_CODE (op0) != REG)
8294 op0 = copy_to_mode_reg (mode, op0);
8296 return gen_rtx_MULT (mode, op0,
8297 gen_int_mode (tree_low_cst (exp1, 0),
8298 TYPE_MODE (TREE_TYPE (exp1))));
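/* For illustration: for address arithmetic like "p + i * 4" under
   EXPAND_SUM, the multiplication comes back as roughly
   (mult:SI (reg:SI i) (const_int 4))
   which the PLUS handling above can combine into
   (plus:SI (reg:SI p) (mult:SI (reg:SI i) (const_int 4)))
   i.e. a base-plus-scaled-index address on machines that have it. */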
8301 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8304 if (modifier == EXPAND_STACK_PARM)
8307 /* Check for multiplying things that have been extended
8308 from a narrower type. If this machine supports multiplying
8309 in that narrower type with a result in the desired type,
8310 do it that way, and avoid the explicit type-conversion. */
8311 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
8312 && TREE_CODE (type) == INTEGER_TYPE
8313 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8314 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8315 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8316 && int_fits_type_p (TREE_OPERAND (exp, 1),
8317 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8318 /* Don't use a widening multiply if a shift will do. */
8319 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8320 > HOST_BITS_PER_WIDE_INT)
8321 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
8323 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8324 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8326 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
8327 /* If both operands are extended, they must either both
8328 be zero-extended or both be sign-extended. */
8329 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8331 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
8333 enum machine_mode innermode
8334 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
8335 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8336 ? smul_widen_optab : umul_widen_optab);
8337 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8338 ? umul_widen_optab : smul_widen_optab);
8339 if (mode == GET_MODE_WIDER_MODE (innermode))
8341 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
8343 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8344 NULL_RTX, VOIDmode, 0);
8345 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8346 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8349 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8350 NULL_RTX, VOIDmode, 0);
8353 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
8354 && innermode == word_mode)
8357 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8358 NULL_RTX, VOIDmode, 0);
8359 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8360 op1 = convert_modes (innermode, mode,
8361 expand_expr (TREE_OPERAND (exp, 1),
8362 NULL_RTX, VOIDmode, 0),
8365 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8366 NULL_RTX, VOIDmode, 0);
8367 temp = expand_binop (mode, other_optab, op0, op1, target,
8368 unsignedp, OPTAB_LIB_WIDEN);
8369 htem = expand_mult_highpart_adjust (innermode,
8370 gen_highpart (innermode, temp),
8372 gen_highpart (innermode, temp),
8374 emit_move_insn (gen_highpart (innermode, temp), htem);
8379 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8380 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8381 return expand_mult (mode, op0, op1, target, unsignedp);
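/* For illustration of the widening test above: with 16-bit HImode
   shorts and a 32-bit SImode result, "(int) s1 * (int) s2" can be
   emitted as a single HImode-to-SImode widening multiply
   (smul_widen_optab here, since the operands are signed) instead of
   two extensions followed by a full SImode multiply. */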
8383 case TRUNC_DIV_EXPR:
8384 case FLOOR_DIV_EXPR:
8386 case ROUND_DIV_EXPR:
8387 case EXACT_DIV_EXPR:
8388 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8390 if (modifier == EXPAND_STACK_PARM)
8392 /* Possible optimization: compute the dividend with EXPAND_SUM
8393 then, if the divisor is constant, we can optimize the case
8394 where some terms of the dividend have coeffs divisible by it. */
8395 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8396 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8397 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8400 /* Emit a/b as a*(1/b). Later we may manage to CSE the reciprocal,
8401 saving an expensive divide. If not, combine will rebuild the
8402 original arithmetic. */
8403 if (flag_unsafe_math_optimizations && optimize && !optimize_size
8404 && TREE_CODE (type) == REAL_TYPE
8405 && !real_onep (TREE_OPERAND (exp, 0)))
8406 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
8407 build (RDIV_EXPR, type,
8408 build_real (type, dconst1),
8409 TREE_OPERAND (exp, 1))),
8410 target, tmode, modifier);
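/* For illustration: with -funsafe-math-optimizations, a run of
   divisions x / d, y / d, z / d is rewritten as x * (1/d),
   y * (1/d), z * (1/d), so CSE can compute 1/d once; if the
   reciprocal ends up used only once, combine rebuilds the plain
   division. */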
8411 this_optab = sdiv_optab;
8414 case TRUNC_MOD_EXPR:
8415 case FLOOR_MOD_EXPR:
8417 case ROUND_MOD_EXPR:
8418 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8420 if (modifier == EXPAND_STACK_PARM)
8422 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8423 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8424 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
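/* Note the first argument to expand_divmod: the division codes above
   pass 0 to request the quotient, while the modulus codes here pass 1
   to request the remainder; both share the same signed/unsigned
   div/mod machinery. */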
8426 case FIX_ROUND_EXPR:
8427 case FIX_FLOOR_EXPR:
8429 abort (); /* Not used for C. */
8431 case FIX_TRUNC_EXPR:
8432 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8433 if (target == 0 || modifier == EXPAND_STACK_PARM)
8434 target = gen_reg_rtx (mode);
8435 expand_fix (target, op0, unsignedp);
8439 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8440 if (target == 0 || modifier == EXPAND_STACK_PARM)
8441 target = gen_reg_rtx (mode);
8442 /* expand_float can't figure out what to do if FROM has VOIDmode.
8443 So give it the correct mode. With -O, cse will optimize this. */
8444 if (GET_MODE (op0) == VOIDmode)
8445 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8447 expand_float (target, op0,
8448 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
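/* For illustration: a CONST_INT operand such as (const_int 42) carries
   no mode (VOIDmode), so it is first copied into a register of the
   operand type's mode; expand_float then knows which integer-to-float
   conversion pattern applies. */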
8452 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8453 if (modifier == EXPAND_STACK_PARM)
8455 temp = expand_unop (mode,
8456 ! unsignedp && flag_trapv
8457 && (GET_MODE_CLASS(mode) == MODE_INT)
8458 ? negv_optab : neg_optab, op0, target, 0);
8464 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8465 if (modifier == EXPAND_STACK_PARM)
8468 /* Handle complex values specially. */
8469 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
8470 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
8471 return expand_complex_abs (mode, op0, target, unsignedp);
8473 /* Unsigned abs is simply the operand. Testing here means we don't
8474 risk generating incorrect code below. */
8475 if (TREE_UNSIGNED (type))
8478 return expand_abs (mode, op0, target, unsignedp,
8479 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8483 target = original_target;
8485 || modifier == EXPAND_STACK_PARM
8486 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
8487 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
8488 || GET_MODE (target) != mode
8489 || (GET_CODE (target) == REG
8490 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8491 target = gen_reg_rtx (mode);
8492 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8493 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8495 /* First try to do it with a special MIN or MAX instruction.
8496 If that does not win, use a conditional jump to select the proper
8497 value. */
8498 this_optab = (TREE_UNSIGNED (type)
8499 ? (code == MIN_EXPR ? umin_optab : umax_optab)
8500 : (code == MIN_EXPR ? smin_optab : smax_optab));
8502 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8507 /* At this point, a MEM target is no longer useful; we will get better
8508 code without it. */
8510 if (GET_CODE (target) == MEM)
8511 target = gen_reg_rtx (mode);
8514 emit_move_insn (target, op0);
8516 op0 = gen_label_rtx ();
8518 /* If this mode is an integer too wide to compare properly,
8519 compare word by word. Rely on cse to optimize constant cases. */
8520 if (GET_MODE_CLASS (mode) == MODE_INT
8521 && ! can_compare_p (GE, mode, ccp_jump))
8523 if (code == MAX_EXPR)
8524 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8525 target, op1, NULL_RTX, op0);
8527 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8528 op1, target, NULL_RTX, op0);
8532 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
8533 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
8534 unsignedp, mode, NULL_RTX, NULL_RTX,
8537 emit_move_insn (target, op1);
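/* The fallback above amounts to, roughly, for MAX_EXPR:
   target = op0;
   if (target >= op1) goto done;
   target = op1;
   done:
   with the comparison done either word-by-word or by a single
   do_compare_rtx_and_jump, as selected above. */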
8542 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8543 if (modifier == EXPAND_STACK_PARM)
8545 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8551 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8552 if (modifier == EXPAND_STACK_PARM)
8554 temp = expand_unop (mode, ffs_optab, op0, target, 1);
8560 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8561 temp = expand_unop (mode, clz_optab, op0, target, 1);
8567 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8568 temp = expand_unop (mode, ctz_optab, op0, target, 1);
8574 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8575 temp = expand_unop (mode, popcount_optab, op0, target, 1);
8581 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8582 temp = expand_unop (mode, parity_optab, op0, target, 1);
8587 /* ??? Can optimize bitwise operations with one arg constant.
8588 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8589 and (a bitwise1 b) bitwise2 b (etc)
8590 but that is probably not worthwhile. */
8592 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8593 boolean values when we want in all cases to compute both of them. In
8594 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8595 as actual zero-or-1 values and then bitwise anding. In cases where
8596 there cannot be any side effects, better code would be made by
8597 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8598 how to recognize those cases. */
8600 case TRUTH_AND_EXPR:
8602 this_optab = and_optab;
8607 this_optab = ior_optab;
8610 case TRUTH_XOR_EXPR:
8612 this_optab = xor_optab;
8619 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8621 if (modifier == EXPAND_STACK_PARM)
8623 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8624 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8627 /* Could determine the answer when only additive constants differ. Also,
8628 the addition of one can be handled by changing the condition. */
8635 case UNORDERED_EXPR:
8642 temp = do_store_flag (exp,
8643 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8644 tmode != VOIDmode ? tmode : mode, 0);
8648 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8649 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8651 && GET_CODE (original_target) == REG
8652 && (GET_MODE (original_target)
8653 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8655 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8658 /* If temp is constant, we can just compute the result. */
8659 if (GET_CODE (temp) == CONST_INT)
8661 if (INTVAL (temp) != 0)
8662 emit_move_insn (target, const1_rtx);
8664 emit_move_insn (target, const0_rtx);
8669 if (temp != original_target)
8671 enum machine_mode mode1 = GET_MODE (temp);
8672 if (mode1 == VOIDmode)
8673 mode1 = tmode != VOIDmode ? tmode : mode;
8675 temp = copy_to_mode_reg (mode1, temp);
8678 op1 = gen_label_rtx ();
8679 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8680 GET_MODE (temp), unsignedp, op1);
8681 emit_move_insn (temp, const1_rtx);
8686 /* If no set-flag instruction, must generate a conditional
8687 store into a temporary variable. Drop through
8688 and handle this like && and ||. */
8690 case TRUTH_ANDIF_EXPR:
8691 case TRUTH_ORIF_EXPR:
8694 || modifier == EXPAND_STACK_PARM
8695 || ! safe_from_p (target, exp, 1)
8696 /* Make sure we don't have a hard reg (such as function's return
8697 value) live across basic blocks, if not optimizing. */
8698 || (!optimize && GET_CODE (target) == REG
8699 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8700 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8703 emit_clr_insn (target);
8705 op1 = gen_label_rtx ();
8706 jumpifnot (exp, op1);
8709 emit_0_to_1_insn (target);
8712 return ignore ? const0_rtx : target;
8714 case TRUTH_NOT_EXPR:
8715 if (modifier == EXPAND_STACK_PARM)
8717 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8718 /* The parser is careful to generate TRUTH_NOT_EXPR
8719 only with operands that are always zero or one. */
8720 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8721 target, 1, OPTAB_LIB_WIDEN);
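/* For illustration: since the operand is known to be 0 or 1, "!x" is
   computed here as "x ^ 1", with no comparison or branch. */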
8727 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8729 return expand_expr (TREE_OPERAND (exp, 1),
8730 (ignore ? const0_rtx : target),
8731 VOIDmode, modifier);
8734 /* If we would have a "singleton" (see below) were it not for a
8735 conversion in each arm, bring that conversion back out. */
8736 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8737 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8738 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8739 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8741 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8742 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8744 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8745 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8746 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8747 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8748 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8749 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8750 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8751 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8752 return expand_expr (build1 (NOP_EXPR, type,
8753 build (COND_EXPR, TREE_TYPE (iftrue),
8754 TREE_OPERAND (exp, 0),
8756 target, tmode, modifier);
8760 /* Note that COND_EXPRs whose type is a structure or union
8761 are required to be constructed to contain assignments of
8762 a temporary variable, so that we can evaluate them here
8763 for side effect only. If type is void, we must do likewise. */
8765 /* If an arm of the branch requires a cleanup,
8766 only that cleanup is performed. */
8769 tree binary_op = 0, unary_op = 0;
8771 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8772 convert it to our mode, if necessary. */
8773 if (integer_onep (TREE_OPERAND (exp, 1))
8774 && integer_zerop (TREE_OPERAND (exp, 2))
8775 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8779 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8784 if (modifier == EXPAND_STACK_PARM)
8786 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8787 if (GET_MODE (op0) == mode)
8791 target = gen_reg_rtx (mode);
8792 convert_move (target, op0, unsignedp);
8796 /* Check for X ? A + B : A. If we have this, we can copy A to the
8797 output and conditionally add B. Similarly for unary operations.
8798 Don't do this if X has side-effects because those side effects
8799 might affect A or B and the "?" operation is a sequence point in
8800 ANSI. (operand_equal_p tests for side effects.) */
8802 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8803 && operand_equal_p (TREE_OPERAND (exp, 2),
8804 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8805 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8806 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8807 && operand_equal_p (TREE_OPERAND (exp, 1),
8808 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8809 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8810 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8811 && operand_equal_p (TREE_OPERAND (exp, 2),
8812 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8813 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8814 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8815 && operand_equal_p (TREE_OPERAND (exp, 1),
8816 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8817 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8819 /* If we are not to produce a result, we have no target. Otherwise,
8820 if a target was specified use it; it will not be used as an
8821 intermediate target unless it is safe. If no target, use a
8822 temporary. */
8826 else if (modifier == EXPAND_STACK_PARM)
8827 temp = assign_temp (type, 0, 0, 1);
8828 else if (original_target
8829 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8830 || (singleton && GET_CODE (original_target) == REG
8831 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8832 && original_target == var_rtx (singleton)))
8833 && GET_MODE (original_target) == mode
8834 #ifdef HAVE_conditional_move
8835 && (! can_conditionally_move_p (mode)
8836 || GET_CODE (original_target) == REG
8837 || TREE_ADDRESSABLE (type))
8839 && (GET_CODE (original_target) != MEM
8840 || TREE_ADDRESSABLE (type)))
8841 temp = original_target;
8842 else if (TREE_ADDRESSABLE (type))
8845 temp = assign_temp (type, 0, 0, 1);
8847 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8848 do the test of X as a store-flag operation, do this as
8849 A + ((X != 0) << log C). Similarly for other simple binary
8850 operators. Only do for C == 1 if BRANCH_COST is low. */
8851 if (temp && singleton && binary_op
8852 && (TREE_CODE (binary_op) == PLUS_EXPR
8853 || TREE_CODE (binary_op) == MINUS_EXPR
8854 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8855 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8856 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8857 : integer_onep (TREE_OPERAND (binary_op, 1)))
8858 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8862 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8863 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8864 ? addv_optab : add_optab)
8865 : TREE_CODE (binary_op) == MINUS_EXPR
8866 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8867 ? subv_optab : sub_optab)
8868 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8869 : xor_optab);
8871 /* If we had X ? A : A + 1, do this as A + (X == 0). */
8872 if (singleton == TREE_OPERAND (exp, 1))
8873 cond = invert_truthvalue (TREE_OPERAND (exp, 0));
8875 cond = TREE_OPERAND (exp, 0);
8877 result = do_store_flag (cond, (safe_from_p (temp, singleton, 1)
8879 mode, BRANCH_COST <= 1);
8881 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8882 result = expand_shift (LSHIFT_EXPR, mode, result,
8883 build_int_2 (tree_log2
8887 (safe_from_p (temp, singleton, 1)
8888 ? temp : NULL_RTX), 0);
8892 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8893 return expand_binop (mode, boptab, op1, result, temp,
8894 unsignedp, OPTAB_LIB_WIDEN);
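/* For illustration, the shape of the branch-free code produced here:
   for "t = x ? a + 4 : a" the store-flag path computes, roughly,
   t = a + ((x != 0) << 2);
   i.e. the comparison result is shifted into place and added. */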
8898 do_pending_stack_adjust ();
8900 op0 = gen_label_rtx ();
8902 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8906 /* If the target conflicts with the other operand of the
8907 binary op, we can't use it. Also, we can't use the target
8908 if it is a hard register, because evaluating the condition
8909 might clobber it. */
8911 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8912 || (GET_CODE (temp) == REG
8913 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8914 temp = gen_reg_rtx (mode);
8915 store_expr (singleton, temp,
8916 modifier == EXPAND_STACK_PARM ? 2 : 0);
8919 expand_expr (singleton,
8920 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8921 if (singleton == TREE_OPERAND (exp, 1))
8922 jumpif (TREE_OPERAND (exp, 0), op0);
8924 jumpifnot (TREE_OPERAND (exp, 0), op0);
8926 start_cleanup_deferral ();
8927 if (binary_op && temp == 0)
8928 /* Just touch the other operand. */
8929 expand_expr (TREE_OPERAND (binary_op, 1),
8930 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8932 store_expr (build (TREE_CODE (binary_op), type,
8933 make_tree (type, temp),
8934 TREE_OPERAND (binary_op, 1)),
8935 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8937 store_expr (build1 (TREE_CODE (unary_op), type,
8938 make_tree (type, temp)),
8939 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8942 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8943 comparison operator. If we have one of these cases, set the
8944 output to A, branch on A (cse will merge these two references),
8945 then set the output to FOO. */
8947 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8948 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8949 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8950 TREE_OPERAND (exp, 1), 0)
8951 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8952 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8953 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8955 if (GET_CODE (temp) == REG
8956 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8957 temp = gen_reg_rtx (mode);
8958 store_expr (TREE_OPERAND (exp, 1), temp,
8959 modifier == EXPAND_STACK_PARM ? 2 : 0);
8960 jumpif (TREE_OPERAND (exp, 0), op0);
8962 start_cleanup_deferral ();
8963 store_expr (TREE_OPERAND (exp, 2), temp,
8964 modifier == EXPAND_STACK_PARM ? 2 : 0);
8968 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8969 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8970 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8971 TREE_OPERAND (exp, 2), 0)
8972 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8973 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8974 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8976 if (GET_CODE (temp) == REG
8977 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8978 temp = gen_reg_rtx (mode);
8979 store_expr (TREE_OPERAND (exp, 2), temp,
8980 modifier == EXPAND_STACK_PARM ? 2 : 0);
8981 jumpifnot (TREE_OPERAND (exp, 0), op0);
8983 start_cleanup_deferral ();
8984 store_expr (TREE_OPERAND (exp, 1), temp,
8985 modifier == EXPAND_STACK_PARM ? 2 : 0);
8990 op1 = gen_label_rtx ();
8991 jumpifnot (TREE_OPERAND (exp, 0), op0);
8993 start_cleanup_deferral ();
8995 /* One branch of the cond can be void, if it never returns. For
8996 example A ? throw : E. */
8998 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8999 store_expr (TREE_OPERAND (exp, 1), temp,
9000 modifier == EXPAND_STACK_PARM ? 2 : 0);
9002 expand_expr (TREE_OPERAND (exp, 1),
9003 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
9004 end_cleanup_deferral ();
9006 emit_jump_insn (gen_jump (op1));
9009 start_cleanup_deferral ();
9011 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
9012 store_expr (TREE_OPERAND (exp, 2), temp,
9013 modifier == EXPAND_STACK_PARM ? 2 : 0);
9015 expand_expr (TREE_OPERAND (exp, 2),
9016 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
9019 end_cleanup_deferral ();
9030 /* Something needs to be initialized, but we didn't know
9031 where that thing was when building the tree. For example,
9032 it could be the return value of a function, or a parameter
9033 to a function which is laid out on the stack, or a temporary
9034 variable which must be passed by reference.
9036 We guarantee that the expression will either be constructed
9037 or copied into our original target. */
9039 tree slot = TREE_OPERAND (exp, 0);
9040 tree cleanups = NULL_TREE;
9043 if (TREE_CODE (slot) != VAR_DECL)
9047 target = original_target;
9049 /* Set this here so that if we get a target that refers to a
9050 register variable that's already been used, put_reg_into_stack
9051 knows that it should fix up those uses. */
9052 TREE_USED (slot) = 1;
9056 if (DECL_RTL_SET_P (slot))
9058 target = DECL_RTL (slot);
9059 /* If we have already expanded the slot, don't do
9060 it again. */
9061 if (TREE_OPERAND (exp, 1) == NULL_TREE)
9066 target = assign_temp (type, 2, 0, 1);
9067 /* All temp slots at this level must not conflict. */
9068 preserve_temp_slots (target);
9069 SET_DECL_RTL (slot, target);
9070 if (TREE_ADDRESSABLE (slot))
9071 put_var_into_stack (slot, /*rescan=*/false);
9073 /* Since SLOT is not known to the called function
9074 to belong to its stack frame, we must build an explicit
9075 cleanup. This case occurs when we must build up a reference
9076 to pass the reference as an argument. In this case,
9077 it is very likely that such a reference need not be
9078 built here. */
9080 if (TREE_OPERAND (exp, 2) == 0)
9081 TREE_OPERAND (exp, 2)
9082 = (*lang_hooks.maybe_build_cleanup) (slot);
9083 cleanups = TREE_OPERAND (exp, 2);
9088 /* This case does occur, when expanding a parameter which
9089 needs to be constructed on the stack. The target
9090 is the actual stack address that we want to initialize.
9091 The function we call will perform the cleanup in this case. */
9093 /* If we have already assigned it space, use that space,
9094 not the target that we were passed in, as our target
9095 parameter is only a hint. */
9096 if (DECL_RTL_SET_P (slot))
9098 target = DECL_RTL (slot);
9099 /* If we have already expanded the slot, don't do
9100 it again. */
9101 if (TREE_OPERAND (exp, 1) == NULL_TREE)
9106 SET_DECL_RTL (slot, target);
9107 /* If we must have an addressable slot, then make sure that
9108 the RTL that we just stored in slot is OK. */
9109 if (TREE_ADDRESSABLE (slot))
9110 put_var_into_stack (slot, /*rescan=*/true);
9114 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
9115 /* Mark it as expanded. */
9116 TREE_OPERAND (exp, 1) = NULL_TREE;
9118 store_expr (exp1, target, modifier == EXPAND_STACK_PARM ? 2 : 0);
9120 expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));
9127 tree lhs = TREE_OPERAND (exp, 0);
9128 tree rhs = TREE_OPERAND (exp, 1);
9130 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
9136 /* If lhs is complex, expand calls in rhs before computing it.
9137 That's so we don't compute a pointer and save it over a
9138 call. If lhs is simple, compute it first so we can give it
9139 as a target if the rhs is just a call. This avoids an
9140 extra temp and copy and that prevents a partial-subsumption
9141 which makes bad code. Actually we could treat
9142 component_ref's of vars like vars. */
9144 tree lhs = TREE_OPERAND (exp, 0);
9145 tree rhs = TREE_OPERAND (exp, 1);
9149 /* Check for |= or &= of a bitfield of size one into another bitfield
9150 of size 1. In this case, (unless we need the result of the
9151 assignment) we can do this more efficiently with a
9152 test followed by an assignment, if necessary.
9154 ??? At this point, we can't get a BIT_FIELD_REF here. But if
9155 things change so we do, this code should be enhanced to
9156 support it. */
9158 && TREE_CODE (lhs) == COMPONENT_REF
9159 && (TREE_CODE (rhs) == BIT_IOR_EXPR
9160 || TREE_CODE (rhs) == BIT_AND_EXPR)
9161 && TREE_OPERAND (rhs, 0) == lhs
9162 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
9163 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
9164 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
9166 rtx label = gen_label_rtx ();
9168 do_jump (TREE_OPERAND (rhs, 1),
9169 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
9170 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
9171 expand_assignment (lhs, convert (TREE_TYPE (rhs),
9172 (TREE_CODE (rhs) == BIT_IOR_EXPR
9174 : integer_zero_node)),
9176 do_pending_stack_adjust ();
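/* For illustration: for one-bit fields this emits, roughly,
   "s.a |= t.b" as "if (t.b) s.a = 1;" and
   "s.a &= t.b" as "if (!t.b) s.a = 0;"
   replacing a read-modify-write of the destination bitfield with a
   test and a conditional store. */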
9181 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
9187 if (!TREE_OPERAND (exp, 0))
9188 expand_null_return ();
9190 expand_return (TREE_OPERAND (exp, 0));
9193 case PREINCREMENT_EXPR:
9194 case PREDECREMENT_EXPR:
9195 return expand_increment (exp, 0, ignore);
9197 case POSTINCREMENT_EXPR:
9198 case POSTDECREMENT_EXPR:
9199 /* Faster to treat as pre-increment if result is not used. */
9200 return expand_increment (exp, ! ignore, ignore);
9203 if (modifier == EXPAND_STACK_PARM)
9205 /* Are we taking the address of a nested function? */
9206 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
9207 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
9208 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
9209 && ! TREE_STATIC (exp))
9211 op0 = trampoline_address (TREE_OPERAND (exp, 0));
9212 op0 = force_operand (op0, target);
9214 /* If we are taking the address of something erroneous, just
9215 use a zero. */
9216 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
9218 /* If we are taking the address of a constant and are at the
9219 top level, we have to use output_constant_def since we can't
9220 call force_const_mem at top level. */
9222 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
9223 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
9225 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
9228 /* We make sure to pass const0_rtx down if we came in with
9229 ignore set, to avoid doing the cleanups twice for something. */
9230 op0 = expand_expr (TREE_OPERAND (exp, 0),
9231 ignore ? const0_rtx : NULL_RTX, VOIDmode,
9232 (modifier == EXPAND_INITIALIZER
9233 ? modifier : EXPAND_CONST_ADDRESS));
9235 /* If we are going to ignore the result, OP0 will have been set
9236 to const0_rtx, so just return it. Don't get confused and
9237 think we are taking the address of the constant. */
9241 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
9242 clever and return a REG when given a MEM. */
9243 op0 = protect_from_queue (op0, 1);
9245 /* We would like the object in memory. If it is a constant, we can
9246 have it be statically allocated into memory. For a non-constant,
9247 we need to allocate some memory and store the value into it. */
9249 if (CONSTANT_P (op0))
9250 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
9252 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
9253 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
9254 || GET_CODE (op0) == PARALLEL || GET_CODE (op0) == LO_SUM)
9256 /* If the operand is a SAVE_EXPR, we can deal with this by
9257 forcing the SAVE_EXPR into memory. */
9258 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
9260 put_var_into_stack (TREE_OPERAND (exp, 0),
9262 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
9266 /* If this object is in a register, it can't be BLKmode. */
9267 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9268 rtx memloc = assign_temp (inner_type, 1, 1, 1);
9270 if (GET_CODE (op0) == PARALLEL)
9271 /* Handle calls that pass values in multiple
9272 non-contiguous locations. The Irix 6 ABI has examples
9273 of this. */
9274 emit_group_store (memloc, op0,
9275 int_size_in_bytes (inner_type));
9277 emit_move_insn (memloc, op0);
9283 if (GET_CODE (op0) != MEM)
9286 mark_temp_addr_taken (op0);
9287 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
9289 op0 = XEXP (op0, 0);
9290 #ifdef POINTERS_EXTEND_UNSIGNED
9291 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
9292 && mode == ptr_mode)
9293 op0 = convert_memory_address (ptr_mode, op0);
9298 /* If OP0 is not aligned at least as much as the type requires, we
9299 need to make a temporary, copy OP0 to it, and take the address of
9300 the temporary. We want to use the alignment of the type, not of
9301 the operand. Note that this is incorrect for FUNCTION_TYPE, but
9302 the test for BLKmode means that can't happen. The test for
9303 BLKmode is because we never make mis-aligned MEMs with
9304 non-BLKmode.
9306 We don't need to do this at all if the machine doesn't have
9307 strict alignment. */
9308 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
9309 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
9311 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
9313 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9316 if (TYPE_ALIGN_OK (inner_type))
9319 if (TREE_ADDRESSABLE (inner_type))
9321 /* We can't make a bitwise copy of this object, so fail. */
9322 error ("cannot take the address of an unaligned member");
9326 new = assign_stack_temp_for_type
9327 (TYPE_MODE (inner_type),
9328 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
9329 : int_size_in_bytes (inner_type),
9330 1, build_qualified_type (inner_type,
9331 (TYPE_QUALS (inner_type)
9332 | TYPE_QUAL_CONST)));
9334 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
9335 (modifier == EXPAND_STACK_PARM
9336 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
9341 op0 = force_operand (XEXP (op0, 0), target);
9345 && GET_CODE (op0) != REG
9346 && modifier != EXPAND_CONST_ADDRESS
9347 && modifier != EXPAND_INITIALIZER
9348 && modifier != EXPAND_SUM)
9349 op0 = force_reg (Pmode, op0);
9351 if (GET_CODE (op0) == REG
9352 && ! REG_USERVAR_P (op0))
9353 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
9355 #ifdef POINTERS_EXTEND_UNSIGNED
9356 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
9357 && mode == ptr_mode)
9358 op0 = convert_memory_address (ptr_mode, op0);
9363 case ENTRY_VALUE_EXPR:
9366 /* COMPLEX type for Extended Pascal & Fortran */
9369 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9372 /* Get the rtx code of the operands. */
9373 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9374 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
9377 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
9381 /* Move the real (op0) and imaginary (op1) parts to their location. */
9382 emit_move_insn (gen_realpart (mode, target), op0);
9383 emit_move_insn (gen_imagpart (mode, target), op1);
9385 insns = get_insns ();
9388 /* Complex construction should appear as a single unit. */
9389 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
9390 each with a separate pseudo as destination.
9391 It's not correct for flow to treat them as a unit. */
9392 if (GET_CODE (target) != CONCAT)
9393 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
9401 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9402 return gen_realpart (mode, op0);
9405 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9406 return gen_imagpart (mode, op0);
9410 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9414 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9417 target = gen_reg_rtx (mode);
9421 /* Store the realpart and the negated imagpart to target. */
9422 emit_move_insn (gen_realpart (partmode, target),
9423 gen_realpart (partmode, op0));
9425 imag_t = gen_imagpart (partmode, target);
9426 temp = expand_unop (partmode,
9427 ! unsignedp && flag_trapv
9428 && (GET_MODE_CLASS(partmode) == MODE_INT)
9429 ? negv_optab : neg_optab,
9430 gen_imagpart (partmode, op0), imag_t, 0);
9432 emit_move_insn (imag_t, temp);
9434 insns = get_insns ();
9437 /* Conjugate should appear as a single unit.
9438 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
9439 each with a separate pseudo as destination.
9440 It's not correct for flow to treat them as a unit. */
9441 if (GET_CODE (target) != CONCAT)
9442 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
9449 case TRY_CATCH_EXPR:
9451 tree handler = TREE_OPERAND (exp, 1);
9453 expand_eh_region_start ();
9455 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9457 expand_eh_region_end_cleanup (handler);
9462 case TRY_FINALLY_EXPR:
9464 tree try_block = TREE_OPERAND (exp, 0);
9465 tree finally_block = TREE_OPERAND (exp, 1);
9467 if (!optimize || unsafe_for_reeval (finally_block) > 1)
9469 /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
9470 is not sufficient, so we cannot expand the block twice.
9471 So we play games with GOTO_SUBROUTINE_EXPR to let us
9472 expand the thing only once. */
9473 /* When not optimizing, we go ahead with this form since
9474 (1) user breakpoints operate more predictably without
9475 code duplication, and
9476 (2) we're not running any of the global optimizers
9477 that would explode in time/space with the highly
9478 connected CFG created by the indirect branching. */
9480 rtx finally_label = gen_label_rtx ();
9481 rtx done_label = gen_label_rtx ();
9482 rtx return_link = gen_reg_rtx (Pmode);
9483 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
9484 (tree) finally_label, (tree) return_link);
9485 TREE_SIDE_EFFECTS (cleanup) = 1;
9487 /* Start a new binding layer that will keep track of all cleanup
9488 actions to be performed. */
9489 expand_start_bindings (2);
9490 target_temp_slot_level = temp_slot_level;
9492 expand_decl_cleanup (NULL_TREE, cleanup);
9493 op0 = expand_expr (try_block, target, tmode, modifier);
9495 preserve_temp_slots (op0);
9496 expand_end_bindings (NULL_TREE, 0, 0);
9497 emit_jump (done_label);
9498 emit_label (finally_label);
9499 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
9500 emit_indirect_jump (return_link);
9501 emit_label (done_label);
9505 expand_start_bindings (2);
9506 target_temp_slot_level = temp_slot_level;
9508 expand_decl_cleanup (NULL_TREE, finally_block);
9509 op0 = expand_expr (try_block, target, tmode, modifier);
9511 preserve_temp_slots (op0);
9512 expand_end_bindings (NULL_TREE, 0, 0);
9518 case GOTO_SUBROUTINE_EXPR:
9520 rtx subr = (rtx) TREE_OPERAND (exp, 0);
9521 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
9522 rtx return_address = gen_label_rtx ();
9523 emit_move_insn (return_link,
9524 gen_rtx_LABEL_REF (Pmode, return_address));
9526 emit_label (return_address);
9531 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
9534 return get_exception_pointer (cfun);
9537 /* Function descriptors are not valid except as
9538 initialization constants, and should not be expanded. */
9542 return (*lang_hooks.expand_expr) (exp, original_target, tmode, modifier);
9545 /* Here to do an ordinary binary operator, generating an instruction
9546 from the optab already placed in `this_optab'. */
9548 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
9550 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
9551 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9553 if (modifier == EXPAND_STACK_PARM)
9555 temp = expand_binop (mode, this_optab, op0, op1, target,
9556 unsignedp, OPTAB_LIB_WIDEN);
9562 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9563 when applied to the address of EXP produces an address known to be
9564 aligned more than BIGGEST_ALIGNMENT. */
9567 is_aligning_offset (offset, exp)
9571 /* Strip off any conversions and WITH_RECORD_EXPR nodes. */
9572 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9573 || TREE_CODE (offset) == NOP_EXPR
9574 || TREE_CODE (offset) == CONVERT_EXPR
9575 || TREE_CODE (offset) == WITH_RECORD_EXPR)
9576 offset = TREE_OPERAND (offset, 0);
9578 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9579 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9580 if (TREE_CODE (offset) != BIT_AND_EXPR
9581 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9582 || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT) <= 0
9583 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9586 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9587 It must be NEGATE_EXPR. Then strip any more conversions. */
9588 offset = TREE_OPERAND (offset, 0);
9589 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9590 || TREE_CODE (offset) == NOP_EXPR
9591 || TREE_CODE (offset) == CONVERT_EXPR)
9592 offset = TREE_OPERAND (offset, 0);
9594 if (TREE_CODE (offset) != NEGATE_EXPR)
9597 offset = TREE_OPERAND (offset, 0);
9598 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9599 || TREE_CODE (offset) == NOP_EXPR
9600 || TREE_CODE (offset) == CONVERT_EXPR)
9601 offset = TREE_OPERAND (offset, 0);
9603 /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
9604 whose type is the same as EXP. */
9605 return (TREE_CODE (offset) == ADDR_EXPR
9606 && (TREE_OPERAND (offset, 0) == exp
9607 || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
9608 && (TREE_TYPE (TREE_OPERAND (offset, 0))
9609 == TREE_TYPE (exp)))));
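/* For illustration, the shape this recognizes: an offset such as
   (-(long) &exp) & (C)
   where C + 1 is a power of 2 larger than BIGGEST_ALIGNMENT, i.e. the
   padding that rounds the address of EXP up to that stronger
   alignment. */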
9612 /* Return the tree node if ARG corresponds to a string constant or zero
9613 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9614 in bytes within the string that ARG is accessing. The type of the
9615 offset will be `sizetype'. */
9618 string_constant (arg, ptr_offset)
9624 if (TREE_CODE (arg) == ADDR_EXPR
9625 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9627 *ptr_offset = size_zero_node;
9628 return TREE_OPERAND (arg, 0);
9630 else if (TREE_CODE (arg) == PLUS_EXPR)
9632 tree arg0 = TREE_OPERAND (arg, 0);
9633 tree arg1 = TREE_OPERAND (arg, 1);
9638 if (TREE_CODE (arg0) == ADDR_EXPR
9639 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9641 *ptr_offset = convert (sizetype, arg1);
9642 return TREE_OPERAND (arg0, 0);
9644 else if (TREE_CODE (arg1) == ADDR_EXPR
9645 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9647 *ptr_offset = convert (sizetype, arg0);
9648 return TREE_OPERAND (arg1, 0);
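/* For illustration: for an access written as "hello" + i (or
   i + "hello"), this returns the STRING_CST for "hello" and sets
   *PTR_OFFSET to i converted to sizetype. */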
9655 /* Expand code for a post- or pre- increment or decrement
9656 and return the RTX for the result.
9657 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9660 expand_increment (exp, post, ignore)
9666 tree incremented = TREE_OPERAND (exp, 0);
9667 optab this_optab = add_optab;
9669 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9670 int op0_is_copy = 0;
9671 int single_insn = 0;
9672 /* 1 means we can't store into OP0 directly,
9673 because it is a subreg narrower than a word,
9674 and we don't dare clobber the rest of the word. */
9677 /* Stabilize any component ref that might need to be
9678 evaluated more than once below. */
9680 || TREE_CODE (incremented) == BIT_FIELD_REF
9681 || (TREE_CODE (incremented) == COMPONENT_REF
9682 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9683 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9684 incremented = stabilize_reference (incremented);
9685 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9686 ones into save exprs so that they don't accidentally get evaluated
9687 more than once by the code below. */
9688 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9689 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9690 incremented = save_expr (incremented);
9692 /* Compute the operands as RTX.
9693 Note whether OP0 is the actual lvalue or a copy of it:
9694 I believe it is a copy iff it is a register or subreg
9695 and insns were generated in computing it. */
9697 temp = get_last_insn ();
9698 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9700 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9701 in place but instead must do sign- or zero-extension during assignment,
9702 so we copy it into a new register and let the code below use it as
9703 a copy.
9705 Note that we can safely modify this SUBREG since it is known not to be
9706 shared (it was made by the expand_expr call above). */
9708 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9711 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9715 else if (GET_CODE (op0) == SUBREG
9716 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9718 /* We cannot increment this SUBREG in place. If we are
9719 post-incrementing, get a copy of the old value. Otherwise,
9720 just mark that we cannot increment in place. */
9722 op0 = copy_to_reg (op0);
9727 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9728 && temp != get_last_insn ());
9729 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9731 /* Decide whether incrementing or decrementing. */
9732 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9733 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9734 this_optab = sub_optab;
9736 /* Convert decrement by a constant into a negative increment. */
9737 if (this_optab == sub_optab
9738 && GET_CODE (op1) == CONST_INT)
9740 op1 = GEN_INT (-INTVAL (op1));
9741 this_optab = add_optab;
9744 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9745 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9747 /* For a preincrement, see if we can do this with a single instruction. */
9750 icode = (int) this_optab->handlers[(int) mode].insn_code;
9751 if (icode != (int) CODE_FOR_nothing
9752 /* Make sure that OP0 is valid for operands 0 and 1
9753 of the insn we want to queue. */
9754 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9755 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9756 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9760 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9761 then we cannot just increment OP0. We must therefore contrive to
9762 increment the original value. Then, for postincrement, we can return
9763 OP0 since it is a copy of the old value. For preincrement, expand here
9764 unless we can do it with a single insn.
9766 Likewise if storing directly into OP0 would clobber high bits
9767 we need to preserve (bad_subreg). */
9768 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9770 /* This is the easiest way to increment the value wherever it is.
9771 Problems with multiple evaluation of INCREMENTED are prevented
9772 because either (1) it is a component_ref or preincrement,
9773 in which case it was stabilized above, or (2) it is an array_ref
9774 with constant index in an array in a register, which is
9775 safe to reevaluate. */
9776 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9777 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9778 ? MINUS_EXPR : PLUS_EXPR),
9781 TREE_OPERAND (exp, 1));
9783 while (TREE_CODE (incremented) == NOP_EXPR
9784 || TREE_CODE (incremented) == CONVERT_EXPR)
9786 newexp = convert (TREE_TYPE (incremented), newexp);
9787 incremented = TREE_OPERAND (incremented, 0);
9790 temp = expand_assignment (incremented, newexp, ! post && ! ignore , 0);
9791 return post ? op0 : temp;
9796 /* We have a true reference to the value in OP0.
9797 If there is an insn to add or subtract in this mode, queue it.
9798 Queueing the increment insn avoids the register shuffling
9799 that often results if we must increment now and first save
9800 the old value for subsequent use. */
9802 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9803 op0 = stabilize (op0);
9806 icode = (int) this_optab->handlers[(int) mode].insn_code;
9807 if (icode != (int) CODE_FOR_nothing
9808 /* Make sure that OP0 is valid for operands 0 and 1
9809 of the insn we want to queue. */
9810 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9811 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9813 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9814 op1 = force_reg (mode, op1);
9816 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9818 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9820 rtx addr = (general_operand (XEXP (op0, 0), mode)
9821 ? force_reg (Pmode, XEXP (op0, 0))
9822 : copy_to_reg (XEXP (op0, 0)));
9825 op0 = replace_equiv_address (op0, addr);
9826 temp = force_reg (GET_MODE (op0), op0);
9827 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9828 op1 = force_reg (mode, op1);
9830 /* The increment queue is LIFO, thus we have to `queue'
9831 the instructions in reverse order. */
9832 enqueue_insn (op0, gen_move_insn (op0, temp));
9833 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9838 /* Preincrement, or we can't increment with one simple insn. */
9840 /* Save a copy of the value before inc or dec, to return it later. */
9841 temp = value = copy_to_reg (op0);
9843 /* Arrange to return the incremented value. */
9844 /* Copy the rtx because expand_binop will protect from the queue,
9845 and the results of that would be invalid for us to return
9846 if our caller does emit_queue before using our result. */
9847 temp = copy_rtx (value = op0);
9849 /* Increment however we can. */
9850 op1 = expand_binop (mode, this_optab, value, op1, op0,
9851 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9853 /* Make sure the value is stored into OP0. */
9855 emit_move_insn (op0, op1);
9860 /* Generate code to calculate EXP using a store-flag instruction
9861 and return an rtx for the result. EXP is either a comparison
9862 or a TRUTH_NOT_EXPR whose operand is a comparison.
9864 If TARGET is nonzero, store the result there if convenient.
9866 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
9867 cheap.
9869 Return zero if there is no suitable set-flag instruction
9870 available on this machine.
9872 Once expand_expr has been called on the arguments of the comparison,
9873 we are committed to doing the store flag, since it is not safe to
9874 re-evaluate the expression. We emit the store-flag insn by calling
9875 emit_store_flag, but only expand the arguments if we have a reason
9876 to believe that emit_store_flag will be successful. If we think that
9877 it will, but it isn't, we have to simulate the store-flag with a
9878 set/jump/set sequence. */
9881 do_store_flag (exp, target, mode, only_cheap)
9884 enum machine_mode mode;
9888 tree arg0, arg1, type;
9890 enum machine_mode operand_mode;
9894 enum insn_code icode;
9895 rtx subtarget = target;
9898 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9899 result at the end. We can't simply invert the test since it would
9900 have already been inverted if it were valid. This case occurs for
9901 some floating-point comparisons. */
9903 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9904 invert = 1, exp = TREE_OPERAND (exp, 0);
9906 arg0 = TREE_OPERAND (exp, 0);
9907 arg1 = TREE_OPERAND (exp, 1);
9909 /* Don't crash if the comparison was erroneous. */
9910 if (arg0 == error_mark_node || arg1 == error_mark_node)
9913 type = TREE_TYPE (arg0);
9914 operand_mode = TYPE_MODE (type);
9915 unsignedp = TREE_UNSIGNED (type);
9917 /* We won't bother with BLKmode store-flag operations because it would mean
9918 passing a lot of information to emit_store_flag. */
9919 if (operand_mode == BLKmode)
9922 /* We won't bother with store-flag operations involving function pointers
9923 when function pointers must be canonicalized before comparisons. */
9924 #ifdef HAVE_canonicalize_funcptr_for_compare
9925 if (HAVE_canonicalize_funcptr_for_compare
9926 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9927 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9929 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9930 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9931 == FUNCTION_TYPE))))
9938 /* Get the rtx comparison code to use. We know that EXP is a comparison
9939 operation of some type. Some comparisons against 1 and -1 can be
9940 converted to comparisons with zero. Do so here so that the tests
9941 below will be aware that we have a comparison with zero. These
9942 tests will not catch constants in the first operand, but constants
9943 are rarely passed as the first operand. */
9945 switch (TREE_CODE (exp))
9954 if (integer_onep (arg1))
9955 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9957 code = unsignedp ? LTU : LT;
9960 if (! unsignedp && integer_all_onesp (arg1))
9961 arg1 = integer_zero_node, code = LT;
9963 code = unsignedp ? LEU : LE;
9966 if (! unsignedp && integer_all_onesp (arg1))
9967 arg1 = integer_zero_node, code = GE;
9969 code = unsignedp ? GTU : GT;
9972 if (integer_onep (arg1))
9973 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9975 code = unsignedp ? GEU : GE;
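/* For illustration of the rewrites above: "x < 1" becomes "x <= 0"
   and "x >= 1" becomes "x > 0" (LEU/GTU for unsigned types), so the
   comparison-with-zero special cases below can recognize them. */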
9978 case UNORDERED_EXPR:
10004 /* Put a constant second. */
10005 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10007 tem = arg0; arg0 = arg1; arg1 = tem;
10008 code = swap_condition (code);
10011 /* If this is an equality or inequality test of a single bit, we can
10012 do this by shifting the bit being tested to the low-order bit and
10013 masking the result with the constant 1. If the condition was EQ,
10014 we xor it with 1. This does not require an scc insn and is faster
10015 than an scc insn even if we have it. */
10017 if ((code == NE || code == EQ)
10018 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10019 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10021 tree inner = TREE_OPERAND (arg0, 0);
10022 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10025 /* If INNER is a right shift of a constant and it plus BITNUM does
10026 not overflow, adjust BITNUM and INNER. */
10028 if (TREE_CODE (inner) == RSHIFT_EXPR
10029 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10030 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10031 && bitnum < TYPE_PRECISION (type)
10032 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10033 bitnum - TYPE_PRECISION (type)))
10035 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10036 inner = TREE_OPERAND (inner, 0);
10039 /* If we are going to be able to omit the AND below, we must do our
10040 operations as unsigned. If we must use the AND, we have a choice.
10041 Normally unsigned is faster, but for some machines signed is. */
10042 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10043 #ifdef LOAD_EXTEND_OP
10044 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10050 if (! get_subtarget (subtarget)
10051 || GET_MODE (subtarget) != operand_mode
10052 || ! safe_from_p (subtarget, inner, 1))
10055 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10058 op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
10059 size_int (bitnum), subtarget, ops_unsignedp);
10061 if (GET_MODE (op0) != mode)
10062 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10064 if ((code == EQ && ! invert) || (code == NE && invert))
10065 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10066 ops_unsignedp, OPTAB_LIB_WIDEN);
10068 /* Put the AND last so it can combine with more things. */
10069 if (bitnum != TYPE_PRECISION (type) - 1)
10070 op0 = expand_and (mode, op0, const1_rtx, subtarget);
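/* For illustration: "(x & 8) != 0" is computed here as
   (x >> 3) & 1
   and "(x & 8) == 0" as the same value XORed with 1, avoiding a
   store-flag (scc) instruction entirely. */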
10075 /* Now see if we are likely to be able to do this. Return if not. */
10076 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10079 icode = setcc_gen_code[(int) code];
10080 if (icode == CODE_FOR_nothing
10081 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10083 /* We can only do this if it is one of the special cases that
10084 can be handled without an scc insn. */
10085 if ((code == LT && integer_zerop (arg1))
10086 || (! only_cheap && code == GE && integer_zerop (arg1)))
10088 else if (BRANCH_COST >= 0
10089 && ! only_cheap && (code == NE || code == EQ)
10090 && TREE_CODE (type) != REAL_TYPE
10091 && ((abs_optab->handlers[(int) operand_mode].insn_code
10092 != CODE_FOR_nothing)
10093 || (ffs_optab->handlers[(int) operand_mode].insn_code
10094 != CODE_FOR_nothing)))
10100 if (! get_subtarget (target)
10101 || GET_MODE (subtarget) != operand_mode
10102 || ! safe_from_p (subtarget, arg1, 1))
10105 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10106 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10109 target = gen_reg_rtx (mode);
10111 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10112 because, if emit_store_flag does anything, it will succeed and
10113 OP0 and OP1 will not be used subsequently. */
10115 result = emit_store_flag (target, code,
10116 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10117 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10118 operand_mode, unsignedp, 1);
10123 result = expand_binop (mode, xor_optab, result, const1_rtx,
10124 result, 0, OPTAB_LIB_WIDEN);
  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
			     operand_mode, NULL_RTX);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
	     || (result != const0_rtx && invert))
	    ? const0_rtx : const1_rtx);

  /* The code of RESULT may not match CODE if compare_from_rtx
     decided to swap its operands and reverse the original code.

     We know that compare_from_rtx returns either a CONST_INT or
     a new comparison code, so it is safe to just extract the
     code from RESULT.  */
  code = GET_CODE (result);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif
/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */
#ifndef CASE_VALUES_THRESHOLD
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#endif /* CASE_VALUES_THRESHOLD */

unsigned int
case_values_threshold ()
{
  return CASE_VALUES_THRESHOLD;
}
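
/* With the default definition above, for example, a switch such as

       switch (x) { case 0: ... case 1: ... case 2: ... case 3: ... }

   has just enough case labels (four, or five without casesi) for the
   switch expander to consider a dispatch table; smaller switches are
   expanded as a tree of compare-and-branch insns.  */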
/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */
int
try_casesi (index_type, index_expr, minval, range,
	    table_label, default_label)
     tree index_type, index_expr, minval, range;
     rtx table_label ATTRIBUTE_UNUSED;
     rtx default_label;
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;
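
  /* An illustration of the conversion below: with a 64-bit index type
     and 32-bit SImode, the index cannot simply be truncated, since
     values differing only in their high 32 bits would collide.  So the
     subtraction of MINVAL and the range check are done in the original
     DImode, jumping to DEFAULT_LABEL for out-of-range values; only an
     index already known to be in range is then truncated.  */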
  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build (MINUS_EXPR, index_type,
			  index_expr, minval);
      minval = integer_zero_node;
      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
			       omode, 1, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
	{
	  index_expr = convert ((*lang_hooks.types.type_for_size)
				(index_bits, 0), index_expr);
	  index_type = TREE_TYPE (index_expr);
	}

      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
    }
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);

  op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
		       op1, TREE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
		       op2, TREE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
			      table_label, default_label));
  return 1;
}
/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif
/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */
static void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  rtx temp, vector;

  if (INTVAL (range) > cfun->max_jumptable_ents)
    cfun->max_jumptable_ents = INTVAL (range);
  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */
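
  /* For example, with case values 3 .. 7, INDEX arrives holding x - 3
     and RANGE is 4.  The single unsigned test

	 (unsigned) (x - 3) > 4

     catches x < 3 (the subtraction wraps around to a huge value) as
     well as x > 7, so one branch checks both bounds.  */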
  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
			   default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);
  /* Don't let a MEM slip thru, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif
  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
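
  /* E.g. with 4-byte table entries, the rtx built below computes
     table_label + index * 4 as the address of the selected entry.  */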
  index = gen_rtx_PLUS (Pmode,
			gen_rtx_MULT (Pmode, index,
				      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
			gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  MEM_NOTRAP_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));
  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
int
try_tablejump (index_type, index_expr, minval, range,
	       table_label, default_label)
     tree index_type, index_expr, minval, range;
     rtx table_label, default_label;
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold (build (MINUS_EXPR, index_type,
			    convert (index_type, index_expr),
			    convert (index_type, minval)));
  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
		convert_modes (TYPE_MODE (index_type),
			       TYPE_MODE (TREE_TYPE (range)),
			       expand_expr (range, NULL_RTX,
					    VOIDmode, 0),
			       TREE_UNSIGNED (TREE_TYPE (range))),
		table_label, default_label);
  return 1;
}
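
/* A minimal sketch of how a caller such as the switch-statement
   expander might combine the two helpers above:

     if (! try_casesi (index_type, index_expr, minval, range,
		       table_label, default_label)
	 && ! try_tablejump (index_type, index_expr, minval, range,
			     table_label, default_label))
       ...fall back to a tree of compare-and-branch insns...

   i.e. prefer the casesi pattern and fall back to an open-coded
   tablejump.  */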
/* Nonzero if the mode is a valid vector mode for this architecture.
   This returns nonzero even if there is no hardware support for the
   vector mode, but we can emulate with narrower modes.  */
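
/* For example, a request for V2DImode on a target with no vector unit
   still returns nonzero here provided plain DImode moves are
   supported, since a V2DI value can be handled as a pair of DIs.  */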
int
vector_mode_valid_p (mode)
     enum machine_mode mode;
{
  enum mode_class class = GET_MODE_CLASS (mode);
  enum machine_mode innermode;

  /* Doh!  What's going on?  */
  if (class != MODE_VECTOR_INT
      && class != MODE_VECTOR_FLOAT)
    return 0;

  /* Hardware support.  Woo hoo!  */
  if (VECTOR_MODE_SUPPORTED_P (mode))
    return 1;

  innermode = GET_MODE_INNER (mode);

  /* We should probably return 1 if requesting V4DI and we have no DI,
     but we have V2DI, but this is probably very unlikely.  */

  /* If we have support for the inner mode, we can safely emulate it.
     We may not have V2DI, but we can emulate with a pair of DIs.  */
  return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
}
/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */
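/* For example, a V4SImode VECTOR_CST holding { 1, 2, 3, 4 } becomes
   (const_vector:V4SI [(const_int 1) (const_int 2)
		       (const_int 3) (const_int 4)]).  */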
static rtx
const_vector_from_tree (exp)
     tree exp;
{
  rtvec v;
  int units, i;
  tree link, elt;
  enum machine_mode inner, mode;

  mode = TYPE_MODE (TREE_TYPE (exp));

  if (is_zeros_p (exp))
    return CONST0_RTX (mode);

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  v = rtvec_alloc (units);

  link = TREE_VECTOR_CST_ELTS (exp);
  for (i = 0; link; link = TREE_CHAIN (link), ++i)
    {
      elt = TREE_VALUE (link);

      if (TREE_CODE (elt) == REAL_CST)
	RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
							 inner);
      else
	RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
					       TREE_INT_CST_HIGH (elt),
					       inner);
    }

  return gen_rtx_raw_CONST_VECTOR (mode, v);
}
#include "gt-expr.h"