/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "typeclass.h"
#include "langhooks.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif

/* Convert defined/undefined to boolean.  */
#ifdef TARGET_MEM_FUNCTIONS
#undef TARGET_MEM_FUNCTIONS
#define TARGET_MEM_FUNCTIONS 1
#else
#define TARGET_MEM_FUNCTIONS 0
#endif
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Chain of pending expressions for PLACEHOLDER_EXPR to replace.  */
tree placeholder_list = 0;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
  void *constfundata;
  int reverse;
};
static rtx enqueue_insn (rtx, rtx);
static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
                                                     unsigned int);
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
                              struct move_by_pieces *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movstr (rtx, rtx, rtx, unsigned);
static rtx emit_block_move_via_libcall (rtx, rtx, rtx);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
                               struct store_by_pieces *);
static bool clear_storage_via_clrstr (rtx, rtx, unsigned);
static rtx clear_storage_via_libcall (rtx, rtx);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static int is_zeros_p (tree);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
                                     HOST_WIDE_INT, enum machine_mode,
                                     tree, tree, int, int);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
                        tree, enum machine_mode, int, tree, int);
static rtx var_rtx (tree);

static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
static unsigned HOST_WIDE_INT highest_pow2_factor_for_type (tree, tree);

static int is_aligning_offset (tree, tree);
static rtx expand_increment (tree, int, int);
static void expand_operands (tree, tree, rtx, rtx*, rtx*,
                             enum expand_modifier);
static rtx do_store_flag (tree, rtx, enum machine_mode, int);
static void emit_single_push_insn (enum machine_mode, rtx, tree);
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
static rtx const_vector_from_tree (tree);
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
/* If a memory-to-memory move would take MOVE_RATIO or more simple
   move-instruction sequences, we will do a movstr or libcall instead.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)
#endif
#endif

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
#endif
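
/* Illustrative sketch (hypothetical helper, compiled out): how the
   MOVE_RATIO heuristic above is meant to be read.  On a target where
   move_by_pieces_ninsns estimates 3 insns for a 12-byte word-aligned
   copy, MOVE_BY_PIECES_P compares that count against MOVE_RATIO.  */
#if 0
static void
move_by_pieces_p_example (void)
{
  /* 12 bytes at 32-bit alignment: compare the insn estimate to
     MOVE_RATIO exactly as the macro does.  */
  unsigned HOST_WIDE_INT size = 12;
  unsigned int align = 32;

  if (MOVE_BY_PIECES_P (size, align))
    ;  /* Expand inline with move_by_pieces.  */
  else
    ;  /* Fall back to a movstr pattern or a memcpy libcall.  */
}
#endif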
/* If a clear memory operation would take CLEAR_RATIO or more simple
   move-instruction sequences, we will do a clrstr or libcall instead.  */

#ifndef CLEAR_RATIO
#if defined (HAVE_clrstrqi) || defined (HAVE_clrstrhi) || defined (HAVE_clrstrsi) || defined (HAVE_clrstrdi) || defined (HAVE_clrstrti)
#define CLEAR_RATIO 2
#else
/* If we are optimizing for space, cut down the default clear ratio.  */
#define CLEAR_RATIO (optimize_size ? 3 : 15)
#endif
#endif

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero, or
   to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
#endif

/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* Stack of EXPR_WITH_FILE_LOCATION nested expressions.  */
struct file_stack *expr_wfl_stack;

/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once (void)
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
         directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
        for (regno = 0; regno < FIRST_PSEUDO_REGISTER
             && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
             regno++)
          {
            if (! HARD_REGNO_MODE_OK (regno, mode))
              continue;

            REGNO (reg) = regno;

            SET_SRC (pat) = mem;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = mem1;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem1;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;
          }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
           srcmode = GET_MODE_WIDER_MODE (srcmode))
        {
          enum insn_code ic;

          ic = can_extend_p (mode, srcmode, 0);
          if (ic == CODE_FOR_nothing)
            continue;

          PUT_MODE (mem, srcmode);

          if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
            float_extend_from_mem[mode][srcmode] = true;
        }
    }
}
/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
}

/* Small sanity check that the queue is empty at the end of a function.  */

void
finish_expr_for_function (void)
{
  if (pending_chain)
    abort ();
}
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (rtx var, rtx body)
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
                                  body, pending_chain);
  return pending_chain;
}

/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */
rtx
protect_from_queue (rtx x, int modify)
{
  RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
         use of autoincrement.  Make a copy of the contents of the memory
         location rather than a copy of the address, but not if the value is
         of mode BLKmode.  Don't modify X in place since it might be
         shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
          && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
        {
          rtx y = XEXP (x, 0);
          rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));

          if (QUEUED_INSN (y))
            {
              rtx temp = gen_reg_rtx (GET_MODE (x));

              emit_insn_before (gen_move_insn (temp, new),
                                QUEUED_INSN (y));
              return temp;
            }

          /* Copy the address into a pseudo, so that the returned value
             remains correct across calls to emit_queue.  */
          return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
        }

      /* Otherwise, recursively protect the subexpressions of all
         the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
        {
          rtx tem = protect_from_queue (XEXP (x, 0), 0);
          if (tem != XEXP (x, 0))
            {
              x = copy_rtx (x);
              XEXP (x, 0) = tem;
            }
        }
      else if (code == PLUS || code == MULT)
        {
          rtx new0 = protect_from_queue (XEXP (x, 0), 0);
          rtx new1 = protect_from_queue (XEXP (x, 1), 0);
          if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
            {
              x = copy_rtx (x);
              XEXP (x, 0) = new0;
              XEXP (x, 1) = new1;
            }
        }
      return x;
    }

  /* If the increment has not happened, use the variable itself.  Copy it
     into a new pseudo so that the value remains correct across calls to
     emit_queue.  */
  if (QUEUED_INSN (x) == 0)
    return copy_to_reg (QUEUED_VAR (x));
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
                    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
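
/* Illustrative sketch (hypothetical helper, compiled out): the calling
   discipline documented above.  Any rtx that might contain a QUEUED must
   go through protect_from_queue immediately before being put in an insn,
   and the protected value must not be cached across emit_queue.  */
#if 0
static void
protect_from_queue_example (rtx dest, rtx src)
{
  /* Right: protect, then use at once.  */
  dest = protect_from_queue (dest, 1);  /* 1 = will be modified.  */
  src = protect_from_queue (src, 0);    /* 0 = read-only use.  */
  emit_move_insn (dest, src);

  /* Hazard: if emit_queue runs here, any protected value still being
     held would describe stale RTL, because the queued increments have
     now been emitted.  */
  emit_queue ();
}
#endif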
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

int
queued_subexp_p (rtx x)
{
  enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
              || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}

/* Perform all the pending incrementations.  */

void
emit_queue (void)
{
  rtx p;
  while ((p = pending_chain))
    {
      rtx body = QUEUED_BODY (p);

      switch (GET_CODE (body))
        {
        case INSN:
        case JUMP_INSN:
        case CALL_INSN:
          QUEUED_INSN (p) = body;
          emit_insn (body);
          break;

#ifdef ENABLE_CHECKING
        case SEQUENCE:
          abort ();
          break;
#endif

        default:
          QUEUED_INSN (p) = emit_insn (body);
          break;
        }

      pending_chain = QUEUED_NEXT (p);
    }
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (rtx to, rtx from, int unsignedp)
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
                              : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
          >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
        abort ();

      if (VECTOR_MODE_P (to_mode))
        from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
        to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }

  if (to_real != from_real)
    abort ();

  if (to_real)
    {
      rtx value, insns;

      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
        {
          /* Try converting directly if the insn is supported.  */
          if ((code = can_extend_p (to_mode, from_mode, 0))
              != CODE_FOR_nothing)
            {
              emit_unop_insn (code, to, from, UNKNOWN);
              return;
            }
        }
#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctqfqf2
      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
          return;
        }
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
          return;
        }
#endif

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
        {
          emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
        {
          emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
        {
          emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
        {
          emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
          return;
        }
#endif

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
          return;
        }
#endif
      libcall = (rtx) 0;
      switch (from_mode)
        {
        case SFmode:
          switch (to_mode)
            {
            case DFmode: libcall = extendsfdf2_libfunc; break;
            case XFmode: libcall = extendsfxf2_libfunc; break;
            case TFmode: libcall = extendsftf2_libfunc; break;
            default: break;
            }
          break;

        case DFmode:
          switch (to_mode)
            {
            case SFmode: libcall = truncdfsf2_libfunc; break;
            case XFmode: libcall = extenddfxf2_libfunc; break;
            case TFmode: libcall = extenddftf2_libfunc; break;
            default: break;
            }
          break;

        case XFmode:
          switch (to_mode)
            {
            case SFmode: libcall = truncxfsf2_libfunc; break;
            case DFmode: libcall = truncxfdf2_libfunc; break;
            default: break;
            }
          break;

        case TFmode:
          switch (to_mode)
            {
            case SFmode: libcall = trunctfsf2_libfunc; break;
            case DFmode: libcall = trunctfdf2_libfunc; break;
            default: break;
            }
          break;

        default:
          break;
        }

      if (libcall == (rtx) 0)
        /* This conversion is not implemented yet.  */
        abort ();

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
                                       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
                                                                    from));
      return;
    }
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          /* If FROM is a SUBREG, put it into a register.  Do this
             so that we always generate the same set of insns for
             better cse'ing; if an intermediate assignment occurred,
             we won't be doing the operation directly on the SUBREG.  */
          if (optimize > 0 && GET_CODE (from) == SUBREG)
            from = force_reg (from_mode, from);
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
               && ((code = can_extend_p (to_mode, word_mode, unsignedp))
                   != CODE_FOR_nothing))
        {
          if (GET_CODE (to) == REG)
            emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
          convert_move (gen_lowpart (word_mode, to), from, unsignedp);
          emit_unop_insn (code, to,
                          gen_lowpart (word_mode, to), equiv_code);
          return;
        }

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
         that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
        from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
        lowpart_mode = word_mode;
      else
        lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
        fill_value = const0_rtx;
      else
        {
#ifdef HAVE_slt
          if (HAVE_slt
              && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
              && STORE_FLAG_VALUE == -1)
            {
              emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
                             lowpart_mode, 0);
              fill_value = gen_reg_rtx (word_mode);
              emit_insn (gen_slt (fill_value));
            }
          else
#endif
            {
              fill_value
                = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
                                size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
                                NULL_RTX, 0);
              fill_value = convert_to_mode (word_mode, fill_value, 1);
            }
        }

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
        {
          int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
          rtx subword = operand_subword (to, index, 1, to_mode);

          if (subword == 0)
            abort ();

          if (fill_value != subword)
            emit_move_insn (subword, fill_value);
        }

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
                              gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || GET_CODE (from) == REG
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }
  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  if (to_mode == PQImode)
    {
      if (from_mode != QImode)
        from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_truncqipqi2
      if (HAVE_truncqipqi2)
        {
          emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
          return;
        }
#endif /* HAVE_truncqipqi2 */
      abort ();
    }

  if (from_mode == PQImode)
    {
      if (to_mode != QImode)
        {
          from = convert_to_mode (QImode, from, unsignedp);
          from_mode = QImode;
        }
      else
        {
#ifdef HAVE_extendpqiqi2
          if (HAVE_extendpqiqi2)
            {
              emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
              return;
            }
#endif /* HAVE_extendpqiqi2 */
          abort ();
        }
    }

  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
        from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
        {
          emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
          return;
        }
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
        {
          from = convert_to_mode (SImode, from, unsignedp);
          from_mode = SImode;
        }
      else
        {
#ifdef HAVE_extendpsisi2
          if (! unsignedp && HAVE_extendpsisi2)
            {
              emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
              return;
            }
#endif /* HAVE_extendpsisi2 */
#ifdef HAVE_zero_extendpsisi2
          if (unsignedp && HAVE_zero_extendpsisi2)
            {
              emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
              return;
            }
#endif /* HAVE_zero_extendpsisi2 */
          abort ();
        }
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
        from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
        {
          emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
          return;
        }
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
        {
          from = convert_to_mode (DImode, from, unsignedp);
          from_mode = DImode;
        }
      else
        {
#ifdef HAVE_extendpdidi2
          if (HAVE_extendpdidi2)
            {
              emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
              return;
            }
#endif /* HAVE_extendpdidi2 */
          abort ();
        }
    }
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || GET_CODE (from) == REG
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
          && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
        from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          if (flag_force_mem)
            from = force_not_mem (from);

          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      else
        {
          enum machine_mode intermediate;
          rtx tmp;
          tree shift_amount;

          /* Search for a mode to convert via.  */
          for (intermediate = from_mode; intermediate != VOIDmode;
               intermediate = GET_MODE_WIDER_MODE (intermediate))
            if (((can_extend_p (to_mode, intermediate, unsignedp)
                  != CODE_FOR_nothing)
                 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
                     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                               GET_MODE_BITSIZE (intermediate))))
                && (can_extend_p (intermediate, from_mode, unsignedp)
                    != CODE_FOR_nothing))
              {
                convert_move (to, convert_to_mode (intermediate, from,
                                                   unsignedp), unsignedp);
                return;
              }

          /* No suitable intermediate mode.
             Generate what we need with shifts.  */
          shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
                                      - GET_MODE_BITSIZE (from_mode), 0);
          from = gen_lowpart (to_mode, force_reg (from_mode, from));
          tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
                              to, unsignedp);
          tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
                              to, unsignedp);
          if (tmp != to)
            emit_move_insn (to, tmp);
          return;
        }
    }
  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
        {
          emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
        {
          emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
        {
          emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
        {
          emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
        {
          emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
        {
          emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
        {
          emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
        {
          emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
        {
          emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
        {
          emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }
  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
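
/* Illustrative sketch (hypothetical helper, compiled out): a typical
   use of convert_move.  UNSIGNEDP selects zero- versus sign-extension
   when widening; narrowing goes through the truncation paths above.  */
#if 0
static rtx
convert_move_example (rtx si_value)
{
  /* Widen an SImode value to DImode with sign extension (unsignedp == 0).
     Passing unsignedp == 1 instead would zero-extend.  */
  rtx wide = gen_reg_rtx (DImode);
  convert_move (wide, si_value, 0);
  return wide;
}
#endif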
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x,
               int unsignedp)
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
          && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
        {
          int width = GET_MODE_BITSIZE (oldmode);

          /* We need to zero extend VAL.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
        }

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
          && GET_MODE_CLASS (oldmode) == MODE_INT
          && (GET_CODE (x) == CONST_DOUBLE
              || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
                  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
                       && direct_load[(int) mode])
                      || (GET_CODE (x) == REG
                          && (! HARD_REGISTER_P (x)
                              || HARD_REGNO_MODE_OK (REGNO (x), mode))
                          && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
                                                    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
         X does not need sign- or zero-extension.  This may not be
         the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
        {
          HOST_WIDE_INT val = INTVAL (x);
          int width = GET_MODE_BITSIZE (oldmode);

          /* We must sign or zero-extend in this case.  Start by
             zero-extending, then sign extend if we need to.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
          if (! unsignedp
              && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
            val |= (HOST_WIDE_INT) (-1) << width;

          return gen_int_mode (val, mode);
        }

      return gen_lowpart (mode, x);
    }

  /* Converting from integer constant into mode is always equivalent to a
     subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    {
      if (GET_MODE_BITSIZE (mode) != GET_MODE_BITSIZE (oldmode))
        abort ();
      return simplify_gen_subreg (mode, x, oldmode, 0);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
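
/* Illustrative sketch (hypothetical helper, compiled out): the constant
   extension arithmetic used by convert_modes above, worked for an
   8-bit value.  */
#if 0
static void
convert_modes_const_example (void)
{
  /* Take (const_int -1) known to be a QImode quantity (width 8).  */
  HOST_WIDE_INT val = -1;
  int width = 8;

  /* Zero-extend first: val becomes 0xff.  */
  val &= ((HOST_WIDE_INT) 1 << width) - 1;

  /* If treating it as signed and the top bit is set, sign-extend:
     val becomes -1 again, i.e. all ones in the wider mode.  */
  if (val & ((HOST_WIDE_INT) 1 << (width - 1)))
    val |= (HOST_WIDE_INT) (-1) << width;
}
#endif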
/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))

/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

int
can_move_by_pieces (unsigned HOST_WIDE_INT len,
                    unsigned int align ATTRIBUTE_UNUSED)
{
  return MOVE_BY_PIECES_P (len, align);
}
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).  The caller must pass FROM
   and TO through protect_from_queue before calling.

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
   stpcpy.  */

rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
                unsigned int align, int endp)
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
        = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
           || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
        = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
          data.autinc_from = 1;
          data.explicit_inc_from = -1;
        }
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (from_addr);
          data.autinc_from = 1;
          data.explicit_inc_from = 1;
        }
      if (!data.autinc_from && CONSTANT_P (from_addr))
        data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
          data.autinc_to = 1;
          data.explicit_inc_to = -1;
        }
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (to_addr);
          data.autinc_to = 1;
          data.explicit_inc_to = 1;
        }
      if (!data.autinc_to && CONSTANT_P (to_addr))
        data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();

  if (endp)
    {
      rtx to1;

      if (data.reverse)
        abort ();
      if (data.autinc_to)
        {
          if (endp == 2)
            {
              if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
                emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
              else
                data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
                                                                -1));
            }
          to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
                                           data.offset);
        }
      else
        {
          if (endp == 2)
            --data.offset;
          to1 = adjust_address (data.to, QImode, data.offset);
        }
      return to1;
    }
  else
    return data.to;
}
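
/* Illustrative sketch (hypothetical helper, compiled out): how a caller
   is expected to drive move_by_pieces, mirroring the emit_block_move
   path below.  */
#if 0
static void
move_by_pieces_example (rtx dst_mem, rtx src_mem)
{
  /* Copy 16 bytes between two BLKmode MEMs, assuming both have been
     through protect_from_queue; ENDP == 0 means no end pointer is
     needed back.  */
  if (MOVE_BY_PIECES_P (16, MEM_ALIGN (dst_mem)))
    move_by_pieces (dst_mem, src_mem, 16, MEM_ALIGN (dst_mem), 0);
}
#endif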
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align)
{
  unsigned HOST_WIDE_INT n_insns = 0;
  unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}
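
/* Illustrative sketch (hypothetical helper, compiled out): a worked
   instance of the count above.  On an assumed target with MOVE_MAX == 4
   and full 32-bit alignment, l == 10 decomposes as 2 SImode moves
   (8 bytes) plus 1 HImode move (2 bytes): 3 insns total.  */
#if 0
static void
move_by_pieces_ninsns_example (void)
{
  unsigned HOST_WIDE_INT n = move_by_pieces_ninsns (10, 32);
  /* Expect n == 3 under the assumptions above.  */
  (void) n;
}
#endif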
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
                  struct move_by_pieces *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
        data->offset -= size;

      if (data->to)
        {
          if (data->autinc_to)
            to1 = adjust_automodify_address (data->to, mode, data->to_addr,
                                             data->offset);
          else
            to1 = adjust_address (data->to, mode, data->offset);
        }

      if (data->autinc_from)
        from1 = adjust_automodify_address (data->from, mode, data->from_addr,
                                           data->offset);
      else
        from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
        emit_insn (gen_add2_insn (data->from_addr,
                                  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
        emit_insn ((*genfun) (to1, from1));
      else
        {
#ifdef PUSH_ROUNDING
          emit_single_push_insn (mode, from1, NULL);
#else
          abort ();
#endif
        }

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
        data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
         to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      abort ();
    }

  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  if (GET_MODE (x) != BLKmode)
    abort ();
  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (GET_CODE (size) == CONST_INT)
    {
      if (INTVAL (size) == 0)
        return 0;

      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, size);
      set_mem_size (y, size);
    }

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movstr (x, y, size, align))
    ;
  else if (may_use_call)
    retval = emit_block_move_via_libcall (x, y, size);
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}
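
/* Illustrative sketch (hypothetical helper, compiled out): a typical
   call to emit_block_move.  BLOCK_OP_NORMAL permits the memcpy libcall;
   BLOCK_OP_CALL_PARM must be used while outgoing arguments are being
   pushed; BLOCK_OP_NO_LIBCALL forces the move_by_pieces or loop paths.  */
#if 0
static rtx
emit_block_move_example (rtx dst_mem, rtx src_mem)
{
  /* Copy a 64-byte block; the return value is memcpy's return value
     when the libcall path was taken, or 0 otherwise.  */
  return emit_block_move (dst_mem, src_mem, GEN_INT (64), BLOCK_OP_NORMAL);
}
#endif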
/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
  if (PUSH_ARGS)
    return true;
  else
    {
      /* Check to see whether memcpy takes all register arguments.  */
      static enum {
        takes_regs_uninit, takes_regs_no, takes_regs_yes
      } takes_regs = takes_regs_uninit;

      switch (takes_regs)
        {
        case takes_regs_uninit:
          {
            CUMULATIVE_ARGS args_so_far;
            tree fn, arg;

            fn = emit_block_move_libcall_fn (false);
            INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0);

            arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
            for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
              {
                enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
                rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
                if (!tmp || !REG_P (tmp))
                  goto fail_takes_regs;
#ifdef FUNCTION_ARG_PARTIAL_NREGS
                if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
                                                NULL_TREE, 1))
                  goto fail_takes_regs;
#endif
                FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
              }
          }
          takes_regs = takes_regs_yes;
          /* FALLTHRU */

        case takes_regs_yes:
          return true;

        fail_takes_regs:
          takes_regs = takes_regs_no;
          /* FALLTHRU */
        case takes_regs_no:
          return false;

        default:
          abort ();
        }
    }
}
/* A subroutine of emit_block_move.  Expand a movstr pattern;
   return true if successful.  */

static bool
emit_block_move_via_movstr (rtx x, rtx y, rtx size, unsigned int align)
{
  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  enum machine_mode mode;

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = movstr_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
          /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
             here because if SIZE is less than the mode mask, as it is
             returned by the macro, it will definitely be less than the
             actual mode mask.  */
          && ((GET_CODE (size) == CONST_INT
               && ((unsigned HOST_WIDE_INT) INTVAL (size)
                   <= (GET_MODE_MASK (mode) >> 1)))
              || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
          && ((pred = insn_data[(int) code].operand[0].predicate) == 0
              || (*pred) (x, BLKmode))
          && ((pred = insn_data[(int) code].operand[1].predicate) == 0
              || (*pred) (y, BLKmode))
          && ((pred = insn_data[(int) code].operand[3].predicate) == 0
              || (*pred) (opalign, VOIDmode)))
        {
          rtx op2;
          rtx last = get_last_insn ();
          rtx pat;

          op2 = convert_to_mode (mode, size, 1);
          pred = insn_data[(int) code].operand[2].predicate;
          if (pred != 0 && ! (*pred) (op2, mode))
            op2 = copy_to_mode_reg (mode, op2);

          /* ??? When called via emit_block_move_for_call, it'd be
             nice if there were some way to inform the backend, so
             that it doesn't fail the expansion because it thinks
             emitting the libcall would be more efficient.  */

          pat = GEN_FCN ((int) code) (x, y, op2, opalign);
          if (pat)
            {
              emit_insn (pat);
              volatile_ok = 0;
              return true;
            }
          else
            delete_insns_since (last);
        }
    }

  volatile_ok = 0;
  return false;
}
/* A subroutine of emit_block_move.  Expand a call to memcpy or bcopy.
   Return the return value from memcpy, 0 otherwise.  */

static rtx
emit_block_move_via_libcall (rtx dst, rtx src, rtx size)
{
  rtx dst_addr, src_addr;
  tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* DST, SRC, or SIZE may have been passed through protect_from_queue.

     It is unsafe to save the value generated by protect_from_queue and reuse
     it later.  Consider what happens if emit_queue is called before the
     return value from protect_from_queue is used.

     Expansion of the CALL_EXPR below will call emit_queue before we are
     finished emitting RTL for argument setup.  So if we are not careful we
     could get the wrong value for an argument.

     To avoid this problem we go ahead and emit code to copy the addresses of
     DST and SRC and SIZE into new pseudos.  We can then place those new
     pseudos into an RTL_EXPR and use them later, even after a call to
     emit_queue.

     Note this is not strictly needed for library calls since they do not call
     emit_queue before loading their arguments.  However, we may need to have
     library calls call emit_queue in the future since failing to do so could
     cause problems for targets which define SMALL_REGISTER_CLASSES and pass
     arguments in registers.  */

  dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));

  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  if (TARGET_MEM_FUNCTIONS)
    size_mode = TYPE_MODE (sizetype);
  else
    size_mode = TYPE_MODE (unsigned_type_node);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.

     For convenience, we generate the call to bcopy this way as well.  */

  if (TARGET_MEM_FUNCTIONS)
    size_tree = make_tree (sizetype, size);
  else
    size_tree = make_tree (unsigned_type_node, size);

  fn = emit_block_move_libcall_fn (true);
  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
  if (TARGET_MEM_FUNCTIONS)
    {
      arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
      arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
    }
  else
    {
      arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
      arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
    }

  /* Now we have to build up the CALL_EXPR itself.  */
  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
  call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
                     call_expr, arg_list, NULL_TREE);

  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);

  /* If we are initializing a readonly value, show the above call clobbered
     it.  Otherwise, a load from it may erroneously be hoisted from a loop, or
     the delay slot scheduler might overlook conflicts and take nasty
     decisions.  */
  if (RTX_UNCHANGING_P (dst))
    add_function_usage_to
      (last_call_insn (), gen_rtx_EXPR_LIST (VOIDmode,
                                             gen_rtx_CLOBBER (VOIDmode, dst),
                                             NULL_RTX));

  return TARGET_MEM_FUNCTIONS ? retval : NULL_RTX;
}
/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_move_fn;

void
init_block_move_fn (const char *asmspec)
{
  if (!block_move_fn)
    {
      tree args, fn;

      if (TARGET_MEM_FUNCTIONS)
        {
          fn = get_identifier ("memcpy");
          args = build_function_type_list (ptr_type_node, ptr_type_node,
                                           const_ptr_type_node, sizetype,
                                           NULL_TREE);
        }
      else
        {
          fn = get_identifier ("bcopy");
          args = build_function_type_list (void_type_node, const_ptr_type_node,
                                           ptr_type_node, unsigned_type_node,
                                           NULL_TREE);
        }

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;

      block_move_fn = fn;
    }

  if (asmspec)
    {
      SET_DECL_RTL (block_move_fn, NULL_RTX);
      SET_DECL_ASSEMBLER_NAME (block_move_fn, get_identifier (asmspec));
    }
}

static tree
emit_block_move_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_move_fn)
    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_move_fn, NULL);
      assemble_external (block_move_fn);
    }

  return block_move_fn;
}
/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

static void
emit_block_move_via_loop (rtx x, rtx y, rtx size,
                          unsigned int align ATTRIBUTE_UNUSED)
{
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_note (NOTE_INSN_LOOP_BEG);

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (Pmode, iter_mode, iter, true);
  x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
  y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
                             true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_note (NOTE_INSN_LOOP_CONT);
  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
                           true, top_label);

  emit_note (NOTE_INSN_LOOP_END);
}
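
/* Illustrative sketch (hypothetical helper, compiled out): the RTL loop
   emitted above is equivalent to the following byte-copy loop, with
   ITER playing the role of i.  */
#if 0
static void
block_move_loop_equivalent (unsigned char *x, const unsigned char *y,
                            unsigned long size)
{
  unsigned long i;

  for (i = 0; i < size; i++)    /* cmp_label / top_label in the RTL.  */
    x[i] = y[i];                /* The QImode emit_move_insn.  */
}
#endif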
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
                               GEN_INT (nregs));
      if (pat)
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
                    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (int regno, rtx x, int nregs)
{
  int i;

  if (nregs == 0)
    return;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      rtx last = get_last_insn ();
      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
                                    GEN_INT (nregs));
      if (pat)
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
        abort ();

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

rtx
gen_group_rtx (rtx orig)
{
  int i, length;
  rtx *tmps;

  if (GET_CODE (orig) != PARALLEL)
    abort ();

  length = XVECLEN (orig, 0);
  tmps = alloca (sizeof (rtx) * length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  if (i)
    tmps[0] = 0;

  for (; i < length; i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
    }

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}
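
/* Illustrative sketch (hypothetical helper, compiled out): the shape of
   the register-group PARALLELs handled here.  A value split across two
   SImode registers at byte offsets 0 and 4 would look like
     (parallel [(expr_list (reg:SI a) (const_int 0))
                (expr_list (reg:SI b) (const_int 4))])
   and gen_group_rtx replaces A and B with fresh pseudos of the same
   modes.  The DFmode wrapper mode below is an assumption for the
   example.  */
#if 0
static rtx
group_rtx_example (void)
{
  rtx r0 = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (SImode), const0_rtx);
  rtx r1 = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (SImode), GEN_INT (4));
  return gen_rtx_PARALLEL (DFmode, gen_rtvec (2, r0, r1));
}
#endif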
2248 /* Emit code to move a block ORIG_SRC of type TYPE to a block DST,
2249 where DST is non-consecutive registers represented by a PARALLEL.
2250 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
2254 emit_group_load (rtx dst, rtx orig_src, tree type ATTRIBUTE_UNUSED, int ssize)
2259 if (GET_CODE (dst) != PARALLEL)
2262 /* Check for a NULL entry, used to indicate that the parameter goes
2263 both on the stack and in registers. */
2264 if (XEXP (XVECEXP (dst, 0, 0), 0))
2269 tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
2271 /* Process the pieces. */
2272 for (i = start; i < XVECLEN (dst, 0); i++)
2274 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
2275 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
2276 unsigned int bytelen = GET_MODE_SIZE (mode);
2279 /* Handle trailing fragments that run over the size of the struct. */
2280 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2282 /* Arrange to shift the fragment to where it belongs.
2283 extract_bit_field loads to the lsb of the reg. */
2285 #ifdef BLOCK_REG_PADDING
2286 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
2287 == (BYTES_BIG_ENDIAN ? upward : downward)
2292 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2293 bytelen = ssize - bytepos;
2298 /* If we won't be loading directly from memory, protect the real source
2299 from strange tricks we might play; but make sure that the source can
2300 be loaded directly into the destination. */
2302 if (GET_CODE (orig_src) != MEM
2303 && (!CONSTANT_P (orig_src)
2304 || (GET_MODE (orig_src) != mode
2305 && GET_MODE (orig_src) != VOIDmode)))
2307 if (GET_MODE (orig_src) == VOIDmode)
2308 src = gen_reg_rtx (mode);
2310 src = gen_reg_rtx (GET_MODE (orig_src));
2312 emit_move_insn (src, orig_src);
2315 /* Optimize the access just a bit. */
2316 if (GET_CODE (src) == MEM
2317 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
2318 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
2319 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2320 && bytelen == GET_MODE_SIZE (mode))
2322 tmps[i] = gen_reg_rtx (mode);
2323 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
2325 else if (GET_CODE (src) == CONCAT)
2327 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
2328 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
2330 if ((bytepos == 0 && bytelen == slen0)
2331 || (bytepos != 0 && bytepos + bytelen <= slen))
2333 /* The following assumes that the concatenated objects all
2334 have the same size. In this case, a simple calculation
2335 can be used to determine the object and the bit field to be extracted. */
2337 tmps[i] = XEXP (src, bytepos / slen0);
2338 if (! CONSTANT_P (tmps[i])
2339 && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
2340 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
2341 (bytepos % slen0) * BITS_PER_UNIT,
2342 1, NULL_RTX, mode, mode, ssize);
2344 else if (bytepos == 0)
2346 rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
2347 emit_move_insn (mem, src);
2348 tmps[i] = adjust_address (mem, mode, 0);
2353 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
2354 SIMD register, which is currently broken. While we get GCC
2355 to emit proper RTL for these cases, let's dump to memory. */
2356 else if (VECTOR_MODE_P (GET_MODE (dst))
2357 && GET_CODE (src) == REG)
2359 int slen = GET_MODE_SIZE (GET_MODE (src));
2362 mem = assign_stack_temp (GET_MODE (src), slen, 0);
2363 emit_move_insn (mem, src);
2364 tmps[i] = adjust_address (mem, mode, (int) bytepos);
2366 else if (CONSTANT_P (src)
2367 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2370 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2371 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2375 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2376 tmps[i], 0, OPTAB_WIDEN);
2381 /* Copy the extracted pieces into the proper (probable) hard regs. */
2382 for (i = start; i < XVECLEN (dst, 0); i++)
2383 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
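/* Usage sketch (the caller and sizes here are hypothetical): to load a
   12-byte BLKmode value MEM into the register group DST above, one
   would write

       emit_group_load (dst, mem, type, 12);

   Passing -1 for SSIZE means the total size is unknown, so the
   trailing-fragment handling above is skipped.  */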
2386 /* Emit code to move a block SRC to block DST, where SRC and DST are
2387 non-consecutive groups of registers, each represented by a PARALLEL. */
2390 emit_group_move (rtx dst, rtx src)
2394 if (GET_CODE (src) != PARALLEL
2395 || GET_CODE (dst) != PARALLEL
2396 || XVECLEN (src, 0) != XVECLEN (dst, 0))
2399 /* Skip first entry if NULL. */
2400 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
2401 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
2402 XEXP (XVECEXP (src, 0, i), 0));
2405 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
2406 where SRC is non-consecutive registers represented by a PARALLEL.
2407 SSIZE represents the total size of block ORIG_DST, or -1 if not known. */
2411 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
2416 if (GET_CODE (src) != PARALLEL)
2419 /* Check for a NULL entry, used to indicate that the parameter goes
2420 both on the stack and in registers. */
2421 if (XEXP (XVECEXP (src, 0, 0), 0))
2426 tmps = alloca (sizeof (rtx) * XVECLEN (src, 0));
2428 /* Copy the (probable) hard regs into pseudos. */
2429 for (i = start; i < XVECLEN (src, 0); i++)
2431 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2432 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2433 emit_move_insn (tmps[i], reg);
2437 /* If we won't be storing directly into memory, protect the real destination
2438 from strange tricks we might play. */
2440 if (GET_CODE (dst) == PARALLEL)
2444 /* We can get a PARALLEL dst if there is a conditional expression in
2445 a return statement. In that case, the dst and src are the same,
2446 so no action is necessary. */
2447 if (rtx_equal_p (dst, src))
2450 /* It is unclear if we can ever reach here, but we may as well handle
2451 it. Allocate a temporary, and split this into a store/load to/from the temporary. */
2454 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2455 emit_group_store (temp, src, type, ssize);
2456 emit_group_load (dst, temp, type, ssize);
2459 else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
2461 dst = gen_reg_rtx (GET_MODE (orig_dst));
2462 /* Make life a bit easier for combine. */
2463 emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
2466 /* Process the pieces. */
2467 for (i = start; i < XVECLEN (src, 0); i++)
2469 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2470 enum machine_mode mode = GET_MODE (tmps[i]);
2471 unsigned int bytelen = GET_MODE_SIZE (mode);
2474 /* Handle trailing fragments that run over the size of the struct. */
2475 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2477 /* store_bit_field always takes its value from the lsb.
2478 Move the fragment to the lsb if it's not already there. */
2480 #ifdef BLOCK_REG_PADDING
2481 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2482 == (BYTES_BIG_ENDIAN ? upward : downward)
2488 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2489 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2490 tmps[i], 0, OPTAB_WIDEN);
2492 bytelen = ssize - bytepos;
2495 if (GET_CODE (dst) == CONCAT)
2497 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2498 dest = XEXP (dst, 0);
2499 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2501 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2502 dest = XEXP (dst, 1);
2504 else if (bytepos == 0 && XVECLEN (src, 0))
2506 dest = assign_stack_temp (GET_MODE (dest),
2507 GET_MODE_SIZE (GET_MODE (dest)), 0);
2508 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2517 /* Optimize the access just a bit. */
2518 if (GET_CODE (dest) == MEM
2519 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2520 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2521 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2522 && bytelen == GET_MODE_SIZE (mode))
2523 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2525 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2526 mode, tmps[i], ssize);
2531 /* Copy from the pseudo into the (probable) hard reg. */
2532 if (orig_dst != dst)
2533 emit_move_insn (orig_dst, dst);
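/* emit_group_store is the mirror image.  A usage sketch (TARGET_MEM is
   hypothetical): after a call whose value lives in the group SRC,

       emit_group_store (target_mem, src, type, int_size_in_bytes (type));

   copies the registers through pseudos and assembles the bytes into
   TARGET_MEM.  */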
2536 /* Generate code to copy a BLKmode object of TYPE out of a
2537 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2538 is null, a stack temporary is created. TGTBLK is returned.
2540 The primary purpose of this routine is to handle functions
2541 that return BLKmode structures in registers. Some machines
2542 (the PA for example) want to return all small structures
2543 in registers regardless of the structure's alignment. */
2546 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2548 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2549 rtx src = NULL, dst = NULL;
2550 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2551 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2555 tgtblk = assign_temp (build_qualified_type (type,
2557 | TYPE_QUAL_CONST)),
2559 preserve_temp_slots (tgtblk);
2562 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2563 into a new pseudo which is a full word. */
2565 if (GET_MODE (srcreg) != BLKmode
2566 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2567 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2569 /* Structures whose size is not a multiple of a word are aligned
2570 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2571 machine, this means we must skip the empty high order bytes when
2572 calculating the bit offset. */
2573 if (BYTES_BIG_ENDIAN
2574 && bytes % UNITS_PER_WORD)
2575 big_endian_correction
2576 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
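/* A worked example of the correction: with 32-bit words
   (BITS_PER_WORD == 32, UNITS_PER_WORD == 4) and a 6-byte structure,
   bytes % UNITS_PER_WORD == 2, so

       big_endian_correction = 32 - 2 * 8 = 16

   i.e. the first 16 bits at the most significant end are padding and
   are skipped when reading the registers.  */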
2578 /* Copy the structure BITSIZE bits at a time.
2580 We could probably emit more efficient code for machines that do not use
2581 strict alignment, but it doesn't seem worth the effort at the current time. */
2583 for (bitpos = 0, xbitpos = big_endian_correction;
2584 bitpos < bytes * BITS_PER_UNIT;
2585 bitpos += bitsize, xbitpos += bitsize)
2587 /* We need a new source operand each time xbitpos is on a
2588 word boundary and when xbitpos == big_endian_correction
2589 (the first time through). */
2590 if (xbitpos % BITS_PER_WORD == 0
2591 || xbitpos == big_endian_correction)
2592 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2595 /* We need a new destination operand each time bitpos is on
2597 if (bitpos % BITS_PER_WORD == 0)
2598 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2600 /* Use xbitpos for the source extraction (right justified) and
2601 bitpos for the destination store (left justified). */
2602 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2603 extract_bit_field (src, bitsize,
2604 xbitpos % BITS_PER_WORD, 1,
2605 NULL_RTX, word_mode, word_mode,
2613 /* Add a USE expression for REG to the (possibly empty) list pointed
2614 to by CALL_FUSAGE. REG must denote a hard register. */
2617 use_reg (rtx *call_fusage, rtx reg)
2619 if (GET_CODE (reg) != REG
2620 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2624 = gen_rtx_EXPR_LIST (VOIDmode,
2625 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2628 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2629 starting at REGNO. All of these registers must be hard registers. */
2632 use_regs (rtx *call_fusage, int regno, int nregs)
2636 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2639 for (i = 0; i < nregs; i++)
2640 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2643 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2644 PARALLEL REGS. This is for calls that pass values in multiple
2645 non-contiguous locations. The Irix 6 ABI has examples of this. */
2648 use_group_regs (rtx *call_fusage, rtx regs)
2652 for (i = 0; i < XVECLEN (regs, 0); i++)
2654 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2656 /* A NULL entry means the parameter goes both on the stack and in
2657 registers. This can also be a MEM for targets that pass values
2658 partially on the stack and partially in registers. */
2659 if (reg != 0 && GET_CODE (reg) == REG)
2660 use_reg (call_fusage, reg);
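/* The resulting chain has the shape (a sketch; register numbers are
   made up)

       (expr_list (use (reg:SI 5))
          (expr_list (use (reg:SI 4))
             (nil)))

   and is later attached to the CALL_INSN so that data-flow passes know
   the call reads those registers.  */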
2665 /* Determine whether the LEN bytes generated by CONSTFUN can be
2666 stored to memory using several move instructions. CONSTFUNDATA is
2667 a pointer which will be passed as argument in every CONSTFUN call.
2668 ALIGN is maximum alignment we can assume. Return nonzero if a
2669 call to store_by_pieces should succeed. */
2672 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2673 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2674 void *constfundata, unsigned int align)
2676 unsigned HOST_WIDE_INT max_size, l;
2677 HOST_WIDE_INT offset = 0;
2678 enum machine_mode mode, tmode;
2679 enum insn_code icode;
2686 if (! STORE_BY_PIECES_P (len, align))
2689 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2690 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2691 align = MOVE_MAX * BITS_PER_UNIT;
2693 /* We would first store what we can in the largest integer mode, then go to
2694 successively smaller modes. */
2697 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2702 max_size = STORE_MAX_PIECES + 1;
2703 while (max_size > 1)
2705 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2706 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2707 if (GET_MODE_SIZE (tmode) < max_size)
2710 if (mode == VOIDmode)
2713 icode = mov_optab->handlers[(int) mode].insn_code;
2714 if (icode != CODE_FOR_nothing
2715 && align >= GET_MODE_ALIGNMENT (mode))
2717 unsigned int size = GET_MODE_SIZE (mode);
2724 cst = (*constfun) (constfundata, offset, mode);
2725 if (!LEGITIMATE_CONSTANT_P (cst))
2735 max_size = GET_MODE_SIZE (mode);
2738 /* The code above should have handled everything. */
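/* Usage sketch: a caller expanding a constant-string store can pair the
   two entry points, roughly as builtins.c does:

       if (can_store_by_pieces (len, builtin_memcpy_read_str,
                                (void *) src_str, align))
         store_by_pieces (dest_mem, len, builtin_memcpy_read_str,
                          (void *) src_str, align, 0);

   where the CONSTFUN returns an rtx for the bytes at a given offset in
   a given mode.  (The names here are illustrative of that use, not
   prescriptive.)  */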
2746 /* Generate several move instructions to store LEN bytes generated by
2747 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2748 pointer which will be passed as argument in every CONSTFUN call.
2749 ALIGN is maximum alignment we can assume.
2750 If ENDP is 0, return TO; if ENDP is 1, return memory at the end, a la
2751 mempcpy; and if ENDP is 2, return memory at the end minus one byte, a la stpcpy. */
2755 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2756 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2757 void *constfundata, unsigned int align, int endp)
2759 struct store_by_pieces data;
2768 if (! STORE_BY_PIECES_P (len, align))
2770 to = protect_from_queue (to, 1);
2771 data.constfun = constfun;
2772 data.constfundata = constfundata;
2775 store_by_pieces_1 (&data, align);
2786 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2787 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2789 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2792 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2799 to1 = adjust_address (data.to, QImode, data.offset);
2807 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2808 rtx with BLKmode). The caller must pass TO through protect_from_queue
2809 before calling. ALIGN is maximum alignment we can assume. */
2812 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2814 struct store_by_pieces data;
2819 data.constfun = clear_by_pieces_1;
2820 data.constfundata = NULL;
2823 store_by_pieces_1 (&data, align);
2826 /* Callback routine for clear_by_pieces.
2827 Return const0_rtx unconditionally. */
2830 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2831 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2832 enum machine_mode mode ATTRIBUTE_UNUSED)
2837 /* Subroutine of clear_by_pieces and store_by_pieces.
2838 Generate several move instructions to store LEN bytes of block TO. (A MEM
2839 rtx with BLKmode). The caller must pass TO through protect_from_queue
2840 before calling. ALIGN is maximum alignment we can assume. */
2843 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2844 unsigned int align ATTRIBUTE_UNUSED)
2846 rtx to_addr = XEXP (data->to, 0);
2847 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
2848 enum machine_mode mode = VOIDmode, tmode;
2849 enum insn_code icode;
2852 data->to_addr = to_addr;
2854 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2855 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2857 data->explicit_inc_to = 0;
2859 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2861 data->offset = data->len;
2863 /* If storing requires more than two move insns,
2864 copy addresses to registers (to make displacements shorter)
2865 and use post-increment if available. */
2866 if (!data->autinc_to
2867 && move_by_pieces_ninsns (data->len, align) > 2)
2869 /* Determine the main mode we'll be using. */
2870 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2871 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2872 if (GET_MODE_SIZE (tmode) < max_size)
2875 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2877 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2878 data->autinc_to = 1;
2879 data->explicit_inc_to = -1;
2882 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2883 && ! data->autinc_to)
2885 data->to_addr = copy_addr_to_reg (to_addr);
2886 data->autinc_to = 1;
2887 data->explicit_inc_to = 1;
2890 if (!data->autinc_to && CONSTANT_P (to_addr))
2891 data->to_addr = copy_addr_to_reg (to_addr);
2894 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2895 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2896 align = MOVE_MAX * BITS_PER_UNIT;
2898 /* First store what we can in the largest integer mode, then go to
2899 successively smaller modes. */
2901 while (max_size > 1)
2903 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2904 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2905 if (GET_MODE_SIZE (tmode) < max_size)
2908 if (mode == VOIDmode)
2911 icode = mov_optab->handlers[(int) mode].insn_code;
2912 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2913 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2915 max_size = GET_MODE_SIZE (mode);
2918 /* The code above should have handled everything. */
2923 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2924 with move instructions for mode MODE. GENFUN is the gen_... function
2925 to make a move insn for that mode. DATA has all the other info. */
2928 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2929 struct store_by_pieces *data)
2931 unsigned int size = GET_MODE_SIZE (mode);
2934 while (data->len >= size)
2937 data->offset -= size;
2939 if (data->autinc_to)
2940 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2943 to1 = adjust_address (data->to, mode, data->offset);
2945 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2946 emit_insn (gen_add2_insn (data->to_addr,
2947 GEN_INT (-(HOST_WIDE_INT) size)));
2949 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2950 emit_insn ((*genfun) (to1, cst));
2952 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2953 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2955 if (! data->reverse)
2956 data->offset += size;
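/* For example, clearing 7 bytes with 4-byte MOVE_MAX and no
   auto-increment addressing would emit (a sketch of the sequence)

       (set (mem:SI (reg)) (const_int 0))
       (set (mem:HI (plus (reg) (const_int 4))) (const_int 0))
       (set (mem:QI (plus (reg) (const_int 6))) (const_int 0))

   one store per mode, widest first.  */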
2962 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2963 its length in bytes. */
2966 clear_storage (rtx object, rtx size)
2969 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2970 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2972 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2973 just move a zero. Otherwise, do this a piece at a time. */
2974 if (GET_MODE (object) != BLKmode
2975 && GET_CODE (size) == CONST_INT
2976 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
2977 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2980 object = protect_from_queue (object, 1);
2981 size = protect_from_queue (size, 0);
2983 if (size == const0_rtx)
2985 else if (GET_CODE (size) == CONST_INT
2986 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2987 clear_by_pieces (object, INTVAL (size), align);
2988 else if (clear_storage_via_clrstr (object, size, align))
2991 retval = clear_storage_via_libcall (object, size);
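/* In summary, clear_storage tries, in order: a single move of zero for
   non-BLKmode objects, clear_by_pieces for small constant sizes, the
   clrstr insn pattern, and finally the library call.  Usage sketch
   (BLK_MEM is hypothetical):

       clear_storage (blk_mem, GEN_INT (32));  */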
2997 /* A subroutine of clear_storage. Expand a clrstr pattern;
2998 return true if successful. */
3001 clear_storage_via_clrstr (rtx object, rtx size, unsigned int align)
3003 /* Try the most limited insn first, because there's no point
3004 including more than one in the machine description unless
3005 the more limited one has some advantage. */
3007 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3008 enum machine_mode mode;
3010 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
3011 mode = GET_MODE_WIDER_MODE (mode))
3013 enum insn_code code = clrstr_optab[(int) mode];
3014 insn_operand_predicate_fn pred;
3016 if (code != CODE_FOR_nothing
3017 /* We don't need MODE to be narrower than
3018 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
3019 the mode mask, as it is returned by the macro, it will
3020 definitely be less than the actual mode mask. */
3021 && ((GET_CODE (size) == CONST_INT
3022 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3023 <= (GET_MODE_MASK (mode) >> 1)))
3024 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3025 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
3026 || (*pred) (object, BLKmode))
3027 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
3028 || (*pred) (opalign, VOIDmode)))
3031 rtx last = get_last_insn ();
3034 op1 = convert_to_mode (mode, size, 1);
3035 pred = insn_data[(int) code].operand[1].predicate;
3036 if (pred != 0 && ! (*pred) (op1, mode))
3037 op1 = copy_to_mode_reg (mode, op1);
3039 pat = GEN_FCN ((int) code) (object, op1, opalign);
3046 delete_insns_since (last);
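/* For reference, a clrstrM pattern takes three operands: the BLKmode
   destination, the length in mode M, and the alignment in bytes as a
   CONST_INT.  A machine description might provide (a rough sketch;
   predicates are illustrative)

       (define_expand "clrstrsi"
         [(use (match_operand:BLK 0 "memory_operand" ""))
          (use (match_operand:SI 1 "general_operand" ""))
          (use (match_operand:SI 2 "const_int_operand" ""))]
         ...)  */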
3053 /* A subroutine of clear_storage. Expand a call to memset or bzero.
3054 Return the return value of memset, 0 otherwise. */
3057 clear_storage_via_libcall (rtx object, rtx size)
3059 tree call_expr, arg_list, fn, object_tree, size_tree;
3060 enum machine_mode size_mode;
3063 /* OBJECT or SIZE may have been passed through protect_from_queue.
3065 It is unsafe to save the value generated by protect_from_queue
3066 and reuse it later. Consider what happens if emit_queue is
3067 called before the return value from protect_from_queue is used.
3069 Expansion of the CALL_EXPR below will call emit_queue before
3070 we are finished emitting RTL for argument setup. So if we are
3071 not careful we could get the wrong value for an argument.
3073 To avoid this problem we go ahead and emit code to copy OBJECT
3074 and SIZE into new pseudos. We can then place those new pseudos
3075 into an RTL_EXPR and use them later, even after a call to
3078 Note this is not strictly needed for library calls since they
3079 do not call emit_queue before loading their arguments. However,
3080 we may need to have library calls call emit_queue in the future
3081 since failing to do so could cause problems for targets which
3082 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
3084 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
3086 if (TARGET_MEM_FUNCTIONS)
3087 size_mode = TYPE_MODE (sizetype);
3089 size_mode = TYPE_MODE (unsigned_type_node);
3090 size = convert_to_mode (size_mode, size, 1);
3091 size = copy_to_mode_reg (size_mode, size);
3093 /* It is incorrect to use the libcall calling conventions to call
3094 memset in this context. This could be a user call to memset and
3095 the user may wish to examine the return value from memset. For
3096 targets where libcalls and normal calls have different conventions
3097 for returning pointers, we could end up generating incorrect code.
3099 For convenience, we generate the call to bzero this way as well. */
3101 object_tree = make_tree (ptr_type_node, object);
3102 if (TARGET_MEM_FUNCTIONS)
3103 size_tree = make_tree (sizetype, size);
3105 size_tree = make_tree (unsigned_type_node, size);
3107 fn = clear_storage_libcall_fn (true);
3108 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
3109 if (TARGET_MEM_FUNCTIONS)
3110 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
3111 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
3113 /* Now we have to build up the CALL_EXPR itself. */
3114 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
3115 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
3116 call_expr, arg_list, NULL_TREE);
3118 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
3120 /* If we are initializing a readonly value, show the above call
3121 clobbered it. Otherwise, a load from it may erroneously be
3122 hoisted from a loop. */
3123 if (RTX_UNCHANGING_P (object))
3124 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
3126 return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
3129 /* A subroutine of clear_storage_via_libcall. Create the tree node
3130 for the function we use for block clears. The first time FOR_CALL
3131 is true, we call assemble_external. */
3133 static GTY(()) tree block_clear_fn;
3136 init_block_clear_fn (const char *asmspec)
3138 if (!block_clear_fn)
3142 if (TARGET_MEM_FUNCTIONS)
3144 fn = get_identifier ("memset");
3145 args = build_function_type_list (ptr_type_node, ptr_type_node,
3146 integer_type_node, sizetype,
3151 fn = get_identifier ("bzero");
3152 args = build_function_type_list (void_type_node, ptr_type_node,
3153 unsigned_type_node, NULL_TREE);
3156 fn = build_decl (FUNCTION_DECL, fn, args);
3157 DECL_EXTERNAL (fn) = 1;
3158 TREE_PUBLIC (fn) = 1;
3159 DECL_ARTIFICIAL (fn) = 1;
3160 TREE_NOTHROW (fn) = 1;
3162 block_clear_fn = fn;
3167 SET_DECL_RTL (block_clear_fn, NULL_RTX);
3168 SET_DECL_ASSEMBLER_NAME (block_clear_fn, get_identifier (asmspec));
3173 clear_storage_libcall_fn (int for_call)
3175 static bool emitted_extern;
3177 if (!block_clear_fn)
3178 init_block_clear_fn (NULL);
3180 if (for_call && !emitted_extern)
3182 emitted_extern = true;
3183 make_decl_rtl (block_clear_fn, NULL);
3184 assemble_external (block_clear_fn);
3187 return block_clear_fn;
3190 /* Generate code to copy Y into X.
3191 Both Y and X must have the same mode, except that
3192 Y can be a constant with VOIDmode.
3193 This mode cannot be BLKmode; use emit_block_move for that.
3195 Return the last instruction emitted. */
3198 emit_move_insn (rtx x, rtx y)
3200 enum machine_mode mode = GET_MODE (x);
3201 rtx y_cst = NULL_RTX;
3204 x = protect_from_queue (x, 1);
3205 y = protect_from_queue (y, 0);
3207 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
3210 /* Never force constant_p_rtx to memory. */
3211 if (GET_CODE (y) == CONSTANT_P_RTX)
3213 else if (CONSTANT_P (y))
3216 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3217 && (last_insn = compress_float_constant (x, y)))
3222 if (!LEGITIMATE_CONSTANT_P (y))
3224 y = force_const_mem (mode, y);
3226 /* If the target's cannot_force_const_mem prevented the spill,
3227 assume that the target's move expanders will also take care
3228 of the non-legitimate constant. */
3234 /* If X or Y are memory references, verify that their addresses are valid for the machine. */
3236 if (GET_CODE (x) == MEM
3237 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3238 && ! push_operand (x, GET_MODE (x)))
3240 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
3241 x = validize_mem (x);
3243 if (GET_CODE (y) == MEM
3244 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3246 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
3247 y = validize_mem (y);
3249 if (mode == BLKmode)
3252 last_insn = emit_move_insn_1 (x, y);
3254 if (y_cst && GET_CODE (x) == REG
3255 && (set = single_set (last_insn)) != NULL_RTX
3256 && SET_DEST (set) == x
3257 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3258 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
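/* For instance (a sketch), if Y was a CONST_DOUBLE that had to be
   spilled to the constant pool, the move from the pool MEM gets a note

       (insn ... (set (reg:DF 100) (mem:DF (symbol_ref ...))) ...
          (expr_list:REG_EQUAL (const_double ...) (nil)))

   so later passes can still see the constant value.  */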
3263 /* Low level part of emit_move_insn.
3264 Called just like emit_move_insn, but assumes X and Y
3265 are basically valid. */
3268 emit_move_insn_1 (rtx x, rtx y)
3270 enum machine_mode mode = GET_MODE (x);
3271 enum machine_mode submode;
3272 enum mode_class class = GET_MODE_CLASS (mode);
3274 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
3277 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
3279 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
3281 /* Expand complex moves by moving real part and imag part, if possible. */
3282 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
3283 && BLKmode != (submode = GET_MODE_INNER (mode))
3284 && (mov_optab->handlers[(int) submode].insn_code
3285 != CODE_FOR_nothing))
3287 /* Don't split destination if it is a stack push. */
3288 int stack = push_operand (x, GET_MODE (x));
3290 #ifdef PUSH_ROUNDING
3291 /* In case we output to the stack, but the size is smaller than the
3292 machine can push exactly, we need to use move instructions. */
3294 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
3295 != GET_MODE_SIZE (submode)))
3298 HOST_WIDE_INT offset1, offset2;
3300 /* Do not use anti_adjust_stack, since we don't want to update
3301 stack_pointer_delta. */
3302 temp = expand_binop (Pmode,
3303 #ifdef STACK_GROWS_DOWNWARD
3311 (GET_MODE_SIZE (GET_MODE (x)))),
3312 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3314 if (temp != stack_pointer_rtx)
3315 emit_move_insn (stack_pointer_rtx, temp);
3317 #ifdef STACK_GROWS_DOWNWARD
3319 offset2 = GET_MODE_SIZE (submode);
3321 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
3322 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
3323 + GET_MODE_SIZE (submode));
3326 emit_move_insn (change_address (x, submode,
3327 gen_rtx_PLUS (Pmode,
3329 GEN_INT (offset1))),
3330 gen_realpart (submode, y));
3331 emit_move_insn (change_address (x, submode,
3332 gen_rtx_PLUS (Pmode,
3334 GEN_INT (offset2))),
3335 gen_imagpart (submode, y));
3339 /* If this is a stack, push the highpart first, so it
3340 will be in the argument order.
3342 In that case, change_address is used only to convert
3343 the mode, not to change the address. */
3346 /* Note that the real part always precedes the imag part in memory
3347 regardless of machine's endianness. */
3348 #ifdef STACK_GROWS_DOWNWARD
3349 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3350 gen_imagpart (submode, y));
3351 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3352 gen_realpart (submode, y));
3354 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3355 gen_realpart (submode, y));
3356 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3357 gen_imagpart (submode, y));
3362 rtx realpart_x, realpart_y;
3363 rtx imagpart_x, imagpart_y;
3365 /* If this is a complex value with each part being smaller than a
3366 word, the usual calling sequence will likely pack the pieces into
3367 a single register. Unfortunately, SUBREG of hard registers only
3368 deals in terms of words, so we have a problem converting input
3369 arguments to the CONCAT of two registers that is used elsewhere
3370 for complex values. If this is before reload, we can copy it into
3371 memory and reload. FIXME, we should see about using extract and
3372 insert on integer registers, but complex short and complex char
3373 variables should be rarely used. */
3374 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
3375 && (reload_in_progress | reload_completed) == 0)
3378 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
3380 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
3382 if (packed_dest_p || packed_src_p)
3384 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
3385 ? MODE_FLOAT : MODE_INT);
3387 enum machine_mode reg_mode
3388 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
3390 if (reg_mode != BLKmode)
3392 rtx mem = assign_stack_temp (reg_mode,
3393 GET_MODE_SIZE (mode), 0);
3394 rtx cmem = adjust_address (mem, mode, 0);
3397 = N_("function using short complex types cannot be inline");
3401 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
3403 emit_move_insn_1 (cmem, y);
3404 return emit_move_insn_1 (sreg, mem);
3408 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
3410 emit_move_insn_1 (mem, sreg);
3411 return emit_move_insn_1 (x, cmem);
3417 realpart_x = gen_realpart (submode, x);
3418 realpart_y = gen_realpart (submode, y);
3419 imagpart_x = gen_imagpart (submode, x);
3420 imagpart_y = gen_imagpart (submode, y);
3422 /* Show the output dies here. This is necessary for SUBREGs
3423 of pseudos since we cannot track their lifetimes correctly;
3424 hard regs shouldn't appear here except as return values.
3425 We never want to emit such a clobber after reload. */
3427 && ! (reload_in_progress || reload_completed)
3428 && (GET_CODE (realpart_x) == SUBREG
3429 || GET_CODE (imagpart_x) == SUBREG))
3430 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3432 emit_move_insn (realpart_x, realpart_y);
3433 emit_move_insn (imagpart_x, imagpart_y);
3436 return get_last_insn ();
3439 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
3440 find a mode to do it in. If we have a movcc, use it. Otherwise,
3441 find the MODE_INT mode of the same width. */
3442 else if (GET_MODE_CLASS (mode) == MODE_CC
3443 && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
3445 enum insn_code insn_code;
3446 enum machine_mode tmode = VOIDmode;
3450 && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
3453 for (tmode = QImode; tmode != VOIDmode;
3454 tmode = GET_MODE_WIDER_MODE (tmode))
3455 if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
3458 if (tmode == VOIDmode)
3461 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
3462 may call change_address which is not appropriate if we were
3463 called when a reload was in progress. We don't have to worry
3464 about changing the address since the size in bytes is supposed to
3465 be the same. Copy the MEM to change the mode and move any
3466 substitutions from the old MEM to the new one. */
3468 if (reload_in_progress)
3470 x = gen_lowpart_common (tmode, x1);
3471 if (x == 0 && GET_CODE (x1) == MEM)
3473 x = adjust_address_nv (x1, tmode, 0);
3474 copy_replacements (x1, x);
3477 y = gen_lowpart_common (tmode, y1);
3478 if (y == 0 && GET_CODE (y1) == MEM)
3480 y = adjust_address_nv (y1, tmode, 0);
3481 copy_replacements (y1, y);
3486 x = gen_lowpart (tmode, x);
3487 y = gen_lowpart (tmode, y);
3490 insn_code = mov_optab->handlers[(int) tmode].insn_code;
3491 return emit_insn (GEN_FCN (insn_code) (x, y));
3494 /* This will handle any multi-word or full-word mode that lacks a move_insn
3495 pattern. However, you will get better code if you define such patterns,
3496 even if they must turn into multiple assembler instructions. */
3497 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
3504 #ifdef PUSH_ROUNDING
3506 /* If X is a push on the stack, do the push now and replace
3507 X with a reference to the stack pointer. */
3508 if (push_operand (x, GET_MODE (x)))
3513 /* Do not use anti_adjust_stack, since we don't want to update
3514 stack_pointer_delta. */
3515 temp = expand_binop (Pmode,
3516 #ifdef STACK_GROWS_DOWNWARD
3524 (GET_MODE_SIZE (GET_MODE (x)))),
3525 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3527 if (temp != stack_pointer_rtx)
3528 emit_move_insn (stack_pointer_rtx, temp);
3530 code = GET_CODE (XEXP (x, 0));
3532 /* Just hope that small offsets off SP are OK. */
3533 if (code == POST_INC)
3534 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3535 GEN_INT (-((HOST_WIDE_INT)
3536 GET_MODE_SIZE (GET_MODE (x)))));
3537 else if (code == POST_DEC)
3538 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3539 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3541 temp = stack_pointer_rtx;
3543 x = change_address (x, VOIDmode, temp);
3547 /* If we are in reload, see if either operand is a MEM whose address
3548 is scheduled for replacement. */
3549 if (reload_in_progress && GET_CODE (x) == MEM
3550 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3551 x = replace_equiv_address_nv (x, inner);
3552 if (reload_in_progress && GET_CODE (y) == MEM
3553 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3554 y = replace_equiv_address_nv (y, inner);
3560 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3563 rtx xpart = operand_subword (x, i, 1, mode);
3564 rtx ypart = operand_subword (y, i, 1, mode);
3566 /* If we can't get a part of Y, put Y into memory if it is a
3567 constant. Otherwise, force it into a register. If we still
3568 can't get a part of Y, abort. */
3569 if (ypart == 0 && CONSTANT_P (y))
3571 y = force_const_mem (mode, y);
3572 ypart = operand_subword (y, i, 1, mode);
3574 else if (ypart == 0)
3575 ypart = operand_subword_force (y, i, mode);
3577 if (xpart == 0 || ypart == 0)
3580 need_clobber |= (GET_CODE (xpart) == SUBREG);
3582 last_insn = emit_move_insn (xpart, ypart);
3588 /* Show the output dies here. This is necessary for SUBREGs
3589 of pseudos since we cannot track their lifetimes correctly;
3590 hard regs shouldn't appear here except as return values.
3591 We never want to emit such a clobber after reload. */
3593 && ! (reload_in_progress || reload_completed)
3594 && need_clobber != 0)
3595 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3605 /* If Y is representable exactly in a narrower mode, and the target can
3606 perform the extension directly from constant or memory, then emit the
3607 move as an extension. */
3610 compress_float_constant (rtx x, rtx y)
3612 enum machine_mode dstmode = GET_MODE (x);
3613 enum machine_mode orig_srcmode = GET_MODE (y);
3614 enum machine_mode srcmode;
3617 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3619 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3620 srcmode != orig_srcmode;
3621 srcmode = GET_MODE_WIDER_MODE (srcmode))
3624 rtx trunc_y, last_insn;
3626 /* Skip if the target can't extend this way. */
3627 ic = can_extend_p (dstmode, srcmode, 0);
3628 if (ic == CODE_FOR_nothing)
3631 /* Skip if the narrowed value isn't exact. */
3632 if (! exact_real_truncate (srcmode, &r))
3635 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3637 if (LEGITIMATE_CONSTANT_P (trunc_y))
3639 /* Skip if the target needs extra instructions to perform the extension. */
3641 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3644 else if (float_extend_from_mem[dstmode][srcmode])
3645 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3649 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3650 last_insn = get_last_insn ();
3652 if (GET_CODE (x) == REG)
3653 set_unique_reg_note (last_insn, REG_EQUAL, y);
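/* Example (a sketch): moving the DFmode constant 1.5 into a register on
   a target with an extendsfdf2 pattern becomes an SFmode constant-pool
   load followed by a float_extend, since 1.5 is exactly representable
   in SFmode.  A constant like 0.1 fails the exact_real_truncate test
   (truncation changes its value), so it is moved in DFmode.  */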
3661 /* Pushing data onto the stack. */
3663 /* Push a block of length SIZE (perhaps variable)
3664 and return an rtx to address the beginning of the block.
3665 Note that it is not possible for the value returned to be a QUEUED.
3666 The value may be virtual_outgoing_args_rtx.
3668 EXTRA is the number of bytes of padding to push in addition to SIZE.
3669 BELOW nonzero means this padding comes at low addresses;
3670 otherwise, the padding comes at high addresses. */
3673 push_block (rtx size, int extra, int below)
3677 size = convert_modes (Pmode, ptr_mode, size, 1);
3678 if (CONSTANT_P (size))
3679 anti_adjust_stack (plus_constant (size, extra));
3680 else if (GET_CODE (size) == REG && extra == 0)
3681 anti_adjust_stack (size);
3684 temp = copy_to_mode_reg (Pmode, size);
3686 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3687 temp, 0, OPTAB_LIB_WIDEN);
3688 anti_adjust_stack (temp);
3691 #ifndef STACK_GROWS_DOWNWARD
3697 temp = virtual_outgoing_args_rtx;
3698 if (extra != 0 && below)
3699 temp = plus_constant (temp, extra);
3703 if (GET_CODE (size) == CONST_INT)
3704 temp = plus_constant (virtual_outgoing_args_rtx,
3705 -INTVAL (size) - (below ? 0 : extra));
3706 else if (extra != 0 && !below)
3707 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3708 negate_rtx (Pmode, plus_constant (size, extra)));
3710 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3711 negate_rtx (Pmode, size));
3714 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
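/* Usage sketch: push_block (GEN_INT (64), 0, 0) adjusts the stack by
   64 bytes and returns an rtx addressing the beginning of the new
   block.  */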
3717 #ifdef PUSH_ROUNDING
3719 /* Emit single push insn. */
3722 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3725 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3727 enum insn_code icode;
3728 insn_operand_predicate_fn pred;
3730 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3731 /* If there is a push pattern, use it. Otherwise try the old way of throwing a
3732 MEM representing the push operation to the move expander. */
3733 icode = push_optab->handlers[(int) mode].insn_code;
3734 if (icode != CODE_FOR_nothing)
3736 if (((pred = insn_data[(int) icode].operand[0].predicate)
3737 && !((*pred) (x, mode))))
3738 x = force_reg (mode, x);
3739 emit_insn (GEN_FCN (icode) (x));
3742 if (GET_MODE_SIZE (mode) == rounded_size)
3743 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3744 /* If we are to pad downward, adjust the stack pointer first and
3745 then store X into the stack location using an offset. This is
3746 because emit_move_insn does not know how to pad; it does not have access to type. */
3748 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3750 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3751 HOST_WIDE_INT offset;
3753 emit_move_insn (stack_pointer_rtx,
3754 expand_binop (Pmode,
3755 #ifdef STACK_GROWS_DOWNWARD
3761 GEN_INT (rounded_size),
3762 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3764 offset = (HOST_WIDE_INT) padding_size;
3765 #ifdef STACK_GROWS_DOWNWARD
3766 if (STACK_PUSH_CODE == POST_DEC)
3767 /* We have already decremented the stack pointer, so get the previous value. */
3769 offset += (HOST_WIDE_INT) rounded_size;
3771 if (STACK_PUSH_CODE == POST_INC)
3772 /* We have already incremented the stack pointer, so get the previous value. */
3774 offset -= (HOST_WIDE_INT) rounded_size;
3776 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3780 #ifdef STACK_GROWS_DOWNWARD
3781 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3782 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3783 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3785 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3786 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3787 GEN_INT (rounded_size));
3789 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3792 dest = gen_rtx_MEM (mode, dest_addr);
3796 set_mem_attributes (dest, type, 1);
3798 if (flag_optimize_sibling_calls)
3799 /* Function incoming arguments may overlap with sibling call
3800 outgoing arguments and we cannot allow reordering of reads
3801 from function arguments with stores to outgoing arguments
3802 of sibling calls. */
3803 set_mem_alias_set (dest, 0);
3805 emit_move_insn (dest, x);
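/* On a STACK_GROWS_DOWNWARD target with no push pattern, the store
   above goes through an address of the shape (a sketch)

       (pre_modify (reg sp) (plus (reg sp) (const_int -rounded_size)))

   which the move expander turns into the actual push.  */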
3809 /* Generate code to push X onto the stack, assuming it has mode MODE and
3811 MODE is redundant except when X is a CONST_INT (since they don't carry mode info).
3813 SIZE is an rtx for the size of data to be copied (in bytes),
3814 needed only if X is BLKmode.
3816 ALIGN (in bits) is maximum alignment we can assume.
3818 If PARTIAL and REG are both nonzero, then copy that many of the first
3819 words of X into registers starting with REG, and push the rest of X.
3820 The amount of space pushed is decreased by PARTIAL words,
3821 rounded *down* to a multiple of PARM_BOUNDARY.
3822 REG must be a hard register in this case.
3823 If REG is zero but PARTIAL is not, take all other actions for an
3824 argument partially in registers, but do not actually load any registers.
3827 EXTRA is the amount in bytes of extra space to leave next to this arg.
3828 This is ignored if an argument block has already been allocated.
3830 On a machine that lacks real push insns, ARGS_ADDR is the address of
3831 the bottom of the argument block for this call. We use indexing off there
3832 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3833 argument block has not been preallocated.
3835 ARGS_SO_FAR is the size of args previously pushed for this call.
3837 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3838 for arguments passed in registers. If nonzero, it will be the number
3839 of bytes required. */
3842 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3843 unsigned int align, int partial, rtx reg, int extra,
3844 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3848 enum direction stack_direction
3849 #ifdef STACK_GROWS_DOWNWARD
3855 /* Decide where to pad the argument: `downward' for below,
3856 `upward' for above, or `none' for don't pad it.
3857 Default is below for small data on big-endian machines; else above. */
3858 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3860 /* Invert direction if stack is post-decrement.
3862 if (STACK_PUSH_CODE == POST_DEC)
3863 if (where_pad != none)
3864 where_pad = (where_pad == downward ? upward : downward);
3866 xinner = x = protect_from_queue (x, 0);
3868 if (mode == BLKmode)
3870 /* Copy a block into the stack, entirely or partially. */
3873 int used = partial * UNITS_PER_WORD;
3874 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3882 /* USED is now the # of bytes we need not copy to the stack
3883 because registers will take care of them. */
3886 xinner = adjust_address (xinner, BLKmode, used);
3888 /* If the partial register-part of the arg counts in its stack size,
3889 skip the part of stack space corresponding to the registers.
3890 Otherwise, start copying to the beginning of the stack space,
3891 by setting SKIP to 0. */
3892 skip = (reg_parm_stack_space == 0) ? 0 : used;
3894 #ifdef PUSH_ROUNDING
3895 /* Do it with several push insns if that doesn't take lots of insns
3896 and if there is no difficulty with push insns that skip bytes
3897 on the stack for alignment purposes. */
3900 && GET_CODE (size) == CONST_INT
3902 && MEM_ALIGN (xinner) >= align
3903 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3904 /* Here we avoid the case of a structure whose weak alignment
3905 forces many pushes of a small amount of data,
3906 and such small pushes do rounding that causes trouble. */
3907 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3908 || align >= BIGGEST_ALIGNMENT
3909 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3910 == (align / BITS_PER_UNIT)))
3911 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3913 /* Push padding now if padding above and stack grows down,
3914 or if padding below and stack grows up.
3915 But if space already allocated, this has already been done. */
3916 if (extra && args_addr == 0
3917 && where_pad != none && where_pad != stack_direction)
3918 anti_adjust_stack (GEN_INT (extra));
3920 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3923 #endif /* PUSH_ROUNDING */
3927 /* Otherwise make space on the stack and copy the data
3928 to the address of that space. */
3930 /* Deduct words put into registers from the size we must copy. */
3933 if (GET_CODE (size) == CONST_INT)
3934 size = GEN_INT (INTVAL (size) - used);
3936 size = expand_binop (GET_MODE (size), sub_optab, size,
3937 GEN_INT (used), NULL_RTX, 0,
3941 /* Get the address of the stack space.
3942 In this case, we do not deal with EXTRA separately.
3943 A single stack adjust will do. */
3946 temp = push_block (size, extra, where_pad == downward);
3949 else if (GET_CODE (args_so_far) == CONST_INT)
3950 temp = memory_address (BLKmode,
3951 plus_constant (args_addr,
3952 skip + INTVAL (args_so_far)));
3954 temp = memory_address (BLKmode,
3955 plus_constant (gen_rtx_PLUS (Pmode,
3960 if (!ACCUMULATE_OUTGOING_ARGS)
3962 /* If the source is referenced relative to the stack pointer,
3963 copy it to another register to stabilize it. We do not need
3964 to do this if we know that we won't be changing sp. */
3966 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3967 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3968 temp = copy_to_reg (temp);
3971 target = gen_rtx_MEM (BLKmode, temp);
3975 set_mem_attributes (target, type, 1);
3976 /* Function incoming arguments may overlap with sibling call
3977 outgoing arguments and we cannot allow reordering of reads
3978 from function arguments with stores to outgoing arguments
3979 of sibling calls. */
3980 set_mem_alias_set (target, 0);
3983 /* ALIGN may well be better aligned than TYPE, e.g. due to
3984 PARM_BOUNDARY. Assume the caller isn't lying. */
3985 set_mem_align (target, align);
3987 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3990 else if (partial > 0)
3992 /* Scalar partly in registers. */
3994 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3997 /* # words of start of argument
3998 that we must make space for but need not store. */
3999 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
4000 int args_offset = INTVAL (args_so_far);
4003 /* Push padding now if padding above and stack grows down,
4004 or if padding below and stack grows up.
4005 But if space already allocated, this has already been done. */
4006 if (extra && args_addr == 0
4007 && where_pad != none && where_pad != stack_direction)
4008 anti_adjust_stack (GEN_INT (extra));
4010 /* If we make space by pushing it, we might as well push
4011 the real data. Otherwise, we can leave OFFSET nonzero
4012 and leave the space uninitialized. */
4016 /* Now NOT_STACK gets the number of words that we don't need to
4017 allocate on the stack. */
4018 not_stack = partial - offset;
4020 /* If the partial register-part of the arg counts in its stack size,
4021 skip the part of stack space corresponding to the registers.
4022 Otherwise, start copying to the beginning of the stack space,
4023 by setting SKIP to 0. */
4024 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
4026 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
4027 x = validize_mem (force_const_mem (mode, x));
4029 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
4030 SUBREGs of such registers are not allowed. */
4031 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
4032 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
4033 x = copy_to_reg (x);
4035 /* Loop over all the words allocated on the stack for this arg. */
4036 /* We can do it by words, because any scalar bigger than a word
4037 has a size a multiple of a word. */
4038 #ifndef PUSH_ARGS_REVERSED
4039 for (i = not_stack; i < size; i++)
4041 for (i = size - 1; i >= not_stack; i--)
4043 if (i >= not_stack + offset)
4044 emit_push_insn (operand_subword_force (x, i, mode),
4045 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
4047 GEN_INT (args_offset + ((i - not_stack + skip)
4049 reg_parm_stack_space, alignment_pad);
4056 /* Push padding now if padding above and stack grows down,
4057 or if padding below and stack grows up.
4058 But if space already allocated, this has already been done. */
4059 if (extra && args_addr == 0
4060 && where_pad != none && where_pad != stack_direction)
4061 anti_adjust_stack (GEN_INT (extra));
4063 #ifdef PUSH_ROUNDING
4064 if (args_addr == 0 && PUSH_ARGS)
4065 emit_single_push_insn (mode, x, type);
4069 if (GET_CODE (args_so_far) == CONST_INT)
4071 = memory_address (mode,
4072 plus_constant (args_addr,
4073 INTVAL (args_so_far)));
4075 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
4077 dest = gen_rtx_MEM (mode, addr);
4080 set_mem_attributes (dest, type, 1);
4081 /* Function incoming arguments may overlap with sibling call
4082 outgoing arguments and we cannot allow reordering of reads
4083 from function arguments with stores to outgoing arguments
4084 of sibling calls. */
4085 set_mem_alias_set (dest, 0);
4088 emit_move_insn (dest, x);
4092 /* If part should go in registers, copy that part
4093 into the appropriate registers. Do this now, at the end,
4094 since mem-to-mem copies above may do function calls. */
4095 if (partial > 0 && reg != 0)
4097 /* Handle calls that pass values in multiple non-contiguous locations.
4098 The Irix 6 ABI has examples of this. */
4099 if (GET_CODE (reg) == PARALLEL)
4100 emit_group_load (reg, x, type, -1);
4102 move_block_to_reg (REGNO (reg), x, partial, mode);
4105 if (extra && args_addr == 0 && where_pad == stack_direction)
4106 anti_adjust_stack (GEN_INT (extra));
4108 if (alignment_pad && args_addr == 0)
4109 anti_adjust_stack (alignment_pad);
4112 /* Return X if X can be used as a subtarget in a sequence of arithmetic operations. */
4116 get_subtarget (rtx x)
4119 /* Only registers can be subtargets. */
4120 || GET_CODE (x) != REG
4121 /* If the register is readonly, it can't be set more than once. */
4122 || RTX_UNCHANGING_P (x)
4123 /* Don't use hard regs to avoid extending their life. */
4124 || REGNO (x) < FIRST_PSEUDO_REGISTER
4125 /* Avoid subtargets inside loops,
4126 since they hide some invariant expressions. */
4127 || preserve_subexpressions_p ())
4131 /* Expand an assignment that stores the value of FROM into TO.
4132 If WANT_VALUE is nonzero, return an rtx for the value of TO.
4133 (This may contain a QUEUED rtx;
4134 if the value is constant, this rtx is a constant.)
4135 Otherwise, the returned value is NULL_RTX. */
4138 expand_assignment (tree to, tree from, int want_value)
4143 /* Don't crash if the lhs of the assignment was erroneous. */
4145 if (TREE_CODE (to) == ERROR_MARK)
4147 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
4148 return want_value ? result : NULL_RTX;
4151 /* Assignment of a structure component needs special treatment
4152 if the structure component's rtx is not simply a MEM.
4153 Assignment of an array element at a constant index, and assignment of
4154 an array element in an unaligned packed structure field, have the same problem. */
4157 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
4158 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
4159 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4161 enum machine_mode mode1;
4162 HOST_WIDE_INT bitsize, bitpos;
4170 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4171 &unsignedp, &volatilep);
4173 /* If we are going to use store_bit_field and extract_bit_field,
4174 make sure to_rtx will be safe for multiple use. */
4176 if (mode1 == VOIDmode && want_value)
4177 tem = stabilize_reference (tem);
4179 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
4183 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4185 if (GET_CODE (to_rtx) != MEM)
4188 #ifdef POINTERS_EXTEND_UNSIGNED
4189 if (GET_MODE (offset_rtx) != Pmode)
4190 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4192 if (GET_MODE (offset_rtx) != ptr_mode)
4193 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4196 /* A constant address in TO_RTX can have VOIDmode; we must not try
4197 to call force_reg for that case, so avoid it. */
4198 if (GET_CODE (to_rtx) == MEM
4199 && GET_MODE (to_rtx) == BLKmode
4200 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4202 && (bitpos % bitsize) == 0
4203 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4204 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4206 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4210 to_rtx = offset_address (to_rtx, offset_rtx,
4211 highest_pow2_factor_for_type (TREE_TYPE (to),
4215 if (GET_CODE (to_rtx) == MEM)
4217 /* If the field is at offset zero, we could have been given the
4218 DECL_RTX of the parent struct. Don't munge it. */
4219 to_rtx = shallow_copy_rtx (to_rtx);
4221 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4224 /* Deal with volatile and readonly fields. The former is only done
4225 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4226 if (volatilep && GET_CODE (to_rtx) == MEM)
4228 if (to_rtx == orig_to_rtx)
4229 to_rtx = copy_rtx (to_rtx);
4230 MEM_VOLATILE_P (to_rtx) = 1;
4233 if (TREE_CODE (to) == COMPONENT_REF
4234 && TREE_READONLY (TREE_OPERAND (to, 1)))
4236 if (to_rtx == orig_to_rtx)
4237 to_rtx = copy_rtx (to_rtx);
4238 RTX_UNCHANGING_P (to_rtx) = 1;
4241 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
4243 if (to_rtx == orig_to_rtx)
4244 to_rtx = copy_rtx (to_rtx);
4245 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4248 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4250 /* Spurious cast for HPUX compiler. */
4251 ? ((enum machine_mode)
4252 TYPE_MODE (TREE_TYPE (to)))
4254 unsignedp, TREE_TYPE (tem), get_alias_set (to));
4256 preserve_temp_slots (result);
4260 /* If the value is meaningful, convert RESULT to the proper mode.
4261 Otherwise, return nothing. */
4262 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
4263 TYPE_MODE (TREE_TYPE (from)),
4265 TREE_UNSIGNED (TREE_TYPE (to)))
4269 /* If the rhs is a function call and its value is not an aggregate,
4270 call the function before we start to compute the lhs.
4271 This is needed for correct code for cases such as
4272 val = setjmp (buf) on machines where reference to val
4273 requires loading up part of an address in a separate insn.
4275 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4276 since it might be a promoted variable where the zero- or sign- extension
4277 needs to be done. Handling this in the normal way is safe because no
4278 computation is done before the call. */
4279 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
4280 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4281 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4282 && GET_CODE (DECL_RTL (to)) == REG))
4287 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
4289 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4291 /* Handle calls that return values in multiple non-contiguous locations.
4292 The Irix 6 ABI has examples of this. */
4293 if (GET_CODE (to_rtx) == PARALLEL)
4294 emit_group_load (to_rtx, value, TREE_TYPE (from),
4295 int_size_in_bytes (TREE_TYPE (from)));
4296 else if (GET_MODE (to_rtx) == BLKmode)
4297 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4300 if (POINTER_TYPE_P (TREE_TYPE (to)))
4301 value = convert_memory_address (GET_MODE (to_rtx), value);
4302 emit_move_insn (to_rtx, value);
4304 preserve_temp_slots (to_rtx);
4307 return want_value ? to_rtx : NULL_RTX;
4310 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4311 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4314 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4316 /* Don't move directly into a return register. */
4317 if (TREE_CODE (to) == RESULT_DECL
4318 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
4323 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
4325 if (GET_CODE (to_rtx) == PARALLEL)
4326 emit_group_load (to_rtx, temp, TREE_TYPE (from),
4327 int_size_in_bytes (TREE_TYPE (from)));
4329 emit_move_insn (to_rtx, temp);
4331 preserve_temp_slots (to_rtx);
4334 return want_value ? to_rtx : NULL_RTX;
4337 /* In case we are returning the contents of an object which overlaps
4338 the place the value is being stored, use a safe function when copying
4339 a value through a pointer into a structure value return block. */
4340 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4341 && current_function_returns_struct
4342 && !current_function_returns_pcc_struct)
4347 size = expr_size (from);
4348 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
4350 if (TARGET_MEM_FUNCTIONS)
4351 emit_library_call (memmove_libfunc, LCT_NORMAL,
4352 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4353 XEXP (from_rtx, 0), Pmode,
4354 convert_to_mode (TYPE_MODE (sizetype),
4355 size, TREE_UNSIGNED (sizetype)),
4356 TYPE_MODE (sizetype));
4358 emit_library_call (bcopy_libfunc, LCT_NORMAL,
4359 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
4360 XEXP (to_rtx, 0), Pmode,
4361 convert_to_mode (TYPE_MODE (integer_type_node),
4362 size,
4363 TREE_UNSIGNED (integer_type_node)),
4364 TYPE_MODE (integer_type_node));
4366 preserve_temp_slots (to_rtx);
4369 return want_value ? to_rtx : NULL_RTX;
4372 /* Compute FROM and store the value in the rtx we got. */
4375 result = store_expr (from, to_rtx, want_value);
4376 preserve_temp_slots (result);
4379 return want_value ? result : NULL_RTX;
4382 /* Generate code for computing expression EXP,
4383 and storing the value into TARGET.
4384 TARGET may contain a QUEUED rtx.
4386 If WANT_VALUE & 1 is nonzero, return a copy of the value
4387 not in TARGET, so that we can be sure to use the proper
4388 value in a containing expression even if TARGET has something
4389 else stored in it. If possible, we copy the value through a pseudo
4390 and return that pseudo. Or, if the value is constant, we try to
4391 return the constant. In some cases, we return a pseudo
4392 copied *from* TARGET.
4394 If the mode is BLKmode then we may return TARGET itself.
4395 It turns out that in BLKmode it doesn't cause a problem,
4396 because C has no operators that could combine two different
4397 assignments into the same BLKmode object with different values
4398 with no sequence point.  Will other languages need this to
4399 be more thorough?
4401 If WANT_VALUE & 1 is 0, we return NULL, to make sure
4402 to catch quickly any cases where the caller uses the value
4403 and fails to set WANT_VALUE.
4405 If WANT_VALUE & 2 is set, this is a store into a call param on the
4406 stack, and block moves may need to be treated specially. */
4409 store_expr (tree exp, rtx target, int want_value)
4412 int dont_return_target = 0;
4413 int dont_store_target = 0;
4415 if (VOID_TYPE_P (TREE_TYPE (exp)))
4417 /* C++ can generate ?: expressions with a throw expression in one
4418 branch and an rvalue in the other. Here, we resolve attempts to
4419 store the throw expression's nonexistent result. */
4422 expand_expr (exp, const0_rtx, VOIDmode, 0);
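/* Illustrative example (editor's sketch, hypothetical C++ source):

     x = p ? throw Err () : y;

   The throw arm has void type; it reaches this point and is expanded
   for its side effects only, since it has no result to store.  */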
4425 if (TREE_CODE (exp) == COMPOUND_EXPR)
4427 /* Perform first part of compound expression, then assign from second
4428 part.  */
4429 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4430 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4432 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4434 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4436 /* For conditional expression, get safe form of the target. Then
4437 test the condition, doing the appropriate assignment on either
4438 side. This avoids the creation of unnecessary temporaries.
4439 For non-BLKmode, it is more efficient not to do this. */
4441 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4444 target = protect_from_queue (target, 1);
4446 do_pending_stack_adjust ();
4448 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4449 start_cleanup_deferral ();
4450 store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
4451 end_cleanup_deferral ();
4453 emit_jump_insn (gen_jump (lab2));
4456 start_cleanup_deferral ();
4457 store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
4458 end_cleanup_deferral ();
4463 return want_value & 1 ? target : NULL_RTX;
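/* Illustrative example (editor's sketch, hypothetical source): for a
   BLKmode assignment such as

     struct big s, a, b;
     ...
     s = flag ? a : b;

   the code above tests FLAG and stores A or B directly into S, rather
   than first forming the selected value in a temporary.  */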
4465 else if (queued_subexp_p (target))
4466 /* If target contains a postincrement, let's not risk
4467 using it as the place to generate the rhs. */
4469 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4471 /* Expand EXP into a new pseudo. */
4472 temp = gen_reg_rtx (GET_MODE (target));
4473 temp = expand_expr (exp, temp, GET_MODE (target),
4475 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4478 temp = expand_expr (exp, NULL_RTX, GET_MODE (target),
4480 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4482 /* If target is volatile, ANSI requires accessing the value
4483 *from* the target, if it is accessed. So make that happen.
4484 In no case return the target itself. */
4485 if (! MEM_VOLATILE_P (target) && (want_value & 1) != 0)
4486 dont_return_target = 1;
4488 else if ((want_value & 1) != 0
4489 && GET_CODE (target) == MEM
4490 && ! MEM_VOLATILE_P (target)
4491 && GET_MODE (target) != BLKmode)
4492 /* If target is in memory and caller wants value in a register instead,
4493 arrange that. Pass TARGET as target for expand_expr so that,
4494 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4495 We know expand_expr will not use the target in that case.
4496 Don't do this if TARGET is volatile because we are supposed
4497 to write it and then read it. */
4499 temp = expand_expr (exp, target, GET_MODE (target),
4500 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4501 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4503 /* If TEMP is already in the desired TARGET, only copy it from
4504 memory and don't store it there again. */
4505 if (temp == target
4506 || (rtx_equal_p (temp, target)
4507 && ! side_effects_p (temp) && ! side_effects_p (target)))
4508 dont_store_target = 1;
4509 temp = copy_to_reg (temp);
4511 dont_return_target = 1;
4513 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4514 /* If this is a scalar in a register that is stored in a wider mode
4515 than the declared mode, compute the result into its declared mode
4516 and then convert to the wider mode.  Our value is the computed
4517 expression.  */
4519 rtx inner_target = 0;
4521 /* If we don't want a value, we can do the conversion inside EXP,
4522 which will often result in some optimizations. Do the conversion
4523 in two steps: first change the signedness, if needed, then
4524 the extend. But don't do this if the type of EXP is a subtype
4525 of something else since then the conversion might involve
4526 more than just converting modes. */
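/* Illustrative example (editor's sketch): storing a signed short
   expression into a variable that the target keeps zero-extended in an
   SImode register is rewritten as the two-step conversion

     (unsigned int) (unsigned short) exp

   so that both the signedness change and the widening can be folded
   into the expansion of EXP itself.  */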
4527 if ((want_value & 1) == 0
4528 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4529 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4531 if (TREE_UNSIGNED (TREE_TYPE (exp))
4532 != SUBREG_PROMOTED_UNSIGNED_P (target))
4534 ((*lang_hooks.types.signed_or_unsigned_type)
4535 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4537 exp = convert ((*lang_hooks.types.type_for_mode)
4538 (GET_MODE (SUBREG_REG (target)),
4539 SUBREG_PROMOTED_UNSIGNED_P (target)),
4542 inner_target = SUBREG_REG (target);
4545 temp = expand_expr (exp, inner_target, VOIDmode,
4546 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4548 /* If TEMP is a MEM and we want a result value, make the access
4549 now so it gets done only once. Strictly speaking, this is
4550 only necessary if the MEM is volatile, or if the address
4551 overlaps TARGET. But not performing the load twice also
4552 reduces the amount of rtl we generate and then have to CSE. */
4553 if (GET_CODE (temp) == MEM && (want_value & 1) != 0)
4554 temp = copy_to_reg (temp);
4556 /* If TEMP is a VOIDmode constant, use convert_modes to make
4557 sure that we properly convert it. */
4558 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4560 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4561 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4562 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4563 GET_MODE (target), temp,
4564 SUBREG_PROMOTED_UNSIGNED_P (target));
4567 convert_move (SUBREG_REG (target), temp,
4568 SUBREG_PROMOTED_UNSIGNED_P (target));
4570 /* If we promoted a constant, change the mode back down to match
4571 target. Otherwise, the caller might get confused by a result whose
4572 mode is larger than expected. */
4574 if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
4576 if (GET_MODE (temp) != VOIDmode)
4578 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4579 SUBREG_PROMOTED_VAR_P (temp) = 1;
4580 SUBREG_PROMOTED_UNSIGNED_SET (temp,
4581 SUBREG_PROMOTED_UNSIGNED_P (target));
4584 temp = convert_modes (GET_MODE (target),
4585 GET_MODE (SUBREG_REG (target)),
4586 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4589 return want_value & 1 ? temp : NULL_RTX;
4593 temp = expand_expr (exp, target, GET_MODE (target),
4594 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4595 /* Return TARGET if it's a specified hardware register.
4596 If TARGET is a volatile mem ref, either return TARGET
4597 or return a reg copied *from* TARGET; ANSI requires this.
4599 Otherwise, if TEMP is not TARGET, return TEMP
4600 if it is constant (for efficiency),
4601 or if we really want the correct value. */
4602 if (!(target && GET_CODE (target) == REG
4603 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4604 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4605 && ! rtx_equal_p (temp, target)
4606 && (CONSTANT_P (temp) || (want_value & 1) != 0))
4607 dont_return_target = 1;
4610 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4611 the same as that of TARGET, adjust the constant. This is needed, for
4612 example, in case it is a CONST_DOUBLE and we want only a word-sized
4613 value.  */
4614 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4615 && TREE_CODE (exp) != ERROR_MARK
4616 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4617 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4618 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4620 /* If value was not generated in the target, store it there.
4621 Convert the value to TARGET's type first if necessary.
4622 If TEMP and TARGET compare equal according to rtx_equal_p, but
4623 one or both of them are volatile memory refs, we have to distinguish
4624 two cases:
4625 - expand_expr has used TARGET.  In this case, we must not generate
4626 another copy.  This can be detected by TARGET being equal according
4627 to == .
4628 - expand_expr has not used TARGET - that means that the source just
4629 happens to have the same RTX form. Since temp will have been created
4630 by expand_expr, it will compare unequal according to == .
4631 We must generate a copy in this case, to reach the correct number
4632 of volatile memory references. */
4634 if ((! rtx_equal_p (temp, target)
4635 || (temp != target && (side_effects_p (temp)
4636 || side_effects_p (target))))
4637 && TREE_CODE (exp) != ERROR_MARK
4638 && ! dont_store_target
4639 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4640 but TARGET is not valid memory reference, TEMP will differ
4641 from TARGET although it is really the same location. */
4642 && (TREE_CODE_CLASS (TREE_CODE (exp)) != 'd'
4643 || target != DECL_RTL_IF_SET (exp))
4644 /* If there's nothing to copy, don't bother. Don't call expr_size
4645 unless necessary, because some front ends' (C++) expr_size hook
4646 aborts on objects that are not supposed to be bit-copied or
4647 bit-initialized.  */
4648 && expr_size (exp) != const0_rtx)
4650 target = protect_from_queue (target, 1);
4651 if (GET_MODE (temp) != GET_MODE (target)
4652 && GET_MODE (temp) != VOIDmode)
4654 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4655 if (dont_return_target)
4657 /* In this case, we will return TEMP,
4658 so make sure it has the proper mode.
4659 But don't forget to store the value into TARGET. */
4660 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4661 emit_move_insn (target, temp);
4664 convert_move (target, temp, unsignedp);
4667 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4669 /* Handle copying a string constant into an array. The string
4670 constant may be shorter than the array. So copy just the string's
4671 actual length, and clear the rest. First get the size of the data
4672 type of the string, which is actually the size of the target. */
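/* Illustrative example (editor's sketch, hypothetical source): for

     char buf[8] = "hi";

   the string constant supplies three bytes (including the terminating
   NUL), so three bytes are block-copied and the remaining five bytes of
   BUF are cleared below.  */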
4673 rtx size = expr_size (exp);
4675 if (GET_CODE (size) == CONST_INT
4676 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4677 emit_block_move (target, temp, size,
4678 (want_value & 2
4679 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4682 /* Compute the size of the data to copy from the string. */
4683 tree copy_size
4684 = size_binop (MIN_EXPR,
4685 make_tree (sizetype, size),
4686 size_int (TREE_STRING_LENGTH (exp)));
4687 rtx copy_size_rtx
4688 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4689 (want_value & 2
4690 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4693 /* Copy that much. */
4694 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4695 TREE_UNSIGNED (sizetype));
4696 emit_block_move (target, temp, copy_size_rtx,
4698 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4700 /* Figure out how much is left in TARGET that we have to clear.
4701 Do all calculations in ptr_mode. */
4702 if (GET_CODE (copy_size_rtx) == CONST_INT)
4704 size = plus_constant (size, -INTVAL (copy_size_rtx));
4705 target = adjust_address (target, BLKmode,
4706 INTVAL (copy_size_rtx));
4710 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4711 copy_size_rtx, NULL_RTX, 0,
4714 #ifdef POINTERS_EXTEND_UNSIGNED
4715 if (GET_MODE (copy_size_rtx) != Pmode)
4716 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4717 TREE_UNSIGNED (sizetype));
4720 target = offset_address (target, copy_size_rtx,
4721 highest_pow2_factor (copy_size));
4722 label = gen_label_rtx ();
4723 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4724 GET_MODE (size), 0, label);
4727 if (size != const0_rtx)
4728 clear_storage (target, size);
4734 /* Handle calls that return values in multiple non-contiguous locations.
4735 The Irix 6 ABI has examples of this. */
4736 else if (GET_CODE (target) == PARALLEL)
4737 emit_group_load (target, temp, TREE_TYPE (exp),
4738 int_size_in_bytes (TREE_TYPE (exp)));
4739 else if (GET_MODE (temp) == BLKmode)
4740 emit_block_move (target, temp, expr_size (exp),
4742 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4744 emit_move_insn (target, temp);
4747 /* If we don't want a value, return NULL_RTX. */
4748 if ((want_value & 1) == 0)
4751 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4752 ??? The latter test doesn't seem to make sense. */
4753 else if (dont_return_target && GET_CODE (temp) != MEM)
4756 /* Return TARGET itself if it is a hard register. */
4757 else if ((want_value & 1) != 0
4758 && GET_MODE (target) != BLKmode
4759 && ! (GET_CODE (target) == REG
4760 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4761 return copy_to_reg (target);
4767 /* Return 1 if EXP just contains zeros. FIXME merge with initializer_zerop. */
4770 is_zeros_p (tree exp)
4774 switch (TREE_CODE (exp))
4778 case NON_LVALUE_EXPR:
4779 case VIEW_CONVERT_EXPR:
4780 return is_zeros_p (TREE_OPERAND (exp, 0));
4783 return integer_zerop (exp);
4786 return
4787 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4790 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4793 for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4794 elt = TREE_CHAIN (elt))
4795 if (!is_zeros_p (TREE_VALUE (elt)))
4801 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4802 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4803 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4804 if (! is_zeros_p (TREE_VALUE (elt)))
4814 /* Return 1 if EXP contains mostly (3/4) zeros. */
4817 mostly_zeros_p (tree exp)
4819 if (TREE_CODE (exp) == CONSTRUCTOR)
4821 int elts = 0, zeros = 0;
4822 tree elt = CONSTRUCTOR_ELTS (exp);
4823 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4825 /* If there are no ranges of true bits, it is all zero. */
4826 return elt == NULL_TREE;
4828 for (; elt; elt = TREE_CHAIN (elt))
4830 /* We do not handle the case where the index is a RANGE_EXPR,
4831 so the statistic will be somewhat inaccurate.
4832 We do make a more accurate count in store_constructor itself,
4833 and since this function is only used for nested array elements,
4834 this should be close enough. */
4835 if (mostly_zeros_p (TREE_VALUE (elt)))
4840 return 4 * zeros >= 3 * elts;
4843 return is_zeros_p (exp);
4846 /* Helper function for store_constructor.
4847 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4848 TYPE is the type of the CONSTRUCTOR, not the element type.
4849 CLEARED is as for store_constructor.
4850 ALIAS_SET is the alias set to use for any stores.
4852 This provides a recursive shortcut back to store_constructor when it isn't
4853 necessary to go through store_field. This is so that we can pass through
4854 the cleared field to let store_constructor know that we may not have to
4855 clear a substructure if the outer structure has already been cleared. */
4858 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4859 HOST_WIDE_INT bitpos, enum machine_mode mode,
4860 tree exp, tree type, int cleared, int alias_set)
4862 if (TREE_CODE (exp) == CONSTRUCTOR
4863 && bitpos % BITS_PER_UNIT == 0
4864 /* If we have a nonzero bitpos for a register target, then we just
4865 let store_field do the bitfield handling. This is unlikely to
4866 generate unnecessary clear instructions anyway.  */
4867 && (bitpos == 0 || GET_CODE (target) == MEM))
4869 if (GET_CODE (target) == MEM)
4870 target
4871 = adjust_address (target,
4872 GET_MODE (target) == BLKmode
4873 || 0 != (bitpos
4874 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4875 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4878 /* Update the alias set, if required. */
4879 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4880 && MEM_ALIAS_SET (target) != 0)
4882 target = copy_rtx (target);
4883 set_mem_alias_set (target, alias_set);
4886 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4889 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4893 /* Store the value of constructor EXP into the rtx TARGET.
4894 TARGET is either a REG or a MEM; we know it cannot conflict, since
4895 safe_from_p has been called.
4896 CLEARED is true if TARGET is known to have been zero'd.
4897 SIZE is the number of bytes of TARGET we are allowed to modify: this
4898 may not be the same as the size of EXP if we are assigning to a field
4899 which has been packed to exclude padding bits. */
4902 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4904 tree type = TREE_TYPE (exp);
4905 #ifdef WORD_REGISTER_OPERATIONS
4906 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4909 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4910 || TREE_CODE (type) == QUAL_UNION_TYPE)
4914 /* If size is zero or the target is already cleared, do nothing. */
4915 if (size == 0 || cleared)
4917 /* We either clear the aggregate or indicate the value is dead. */
4918 else if ((TREE_CODE (type) == UNION_TYPE
4919 || TREE_CODE (type) == QUAL_UNION_TYPE)
4920 && ! CONSTRUCTOR_ELTS (exp))
4921 /* If the constructor is empty, clear the union. */
4923 clear_storage (target, expr_size (exp));
4927 /* If we are building a static constructor into a register,
4928 set the initial value as zero so we can fold the value into
4929 a constant. But if more than one register is involved,
4930 this probably loses. */
4931 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4932 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4934 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4938 /* If the constructor has fewer fields than the structure
4939 or if we are initializing the structure to mostly zeros,
4940 clear the whole structure first. Don't do this if TARGET is a
4941 register whose mode size isn't equal to SIZE since clear_storage
4942 can't handle this case. */
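/* Illustrative example (editor's sketch, hypothetical source): for

     struct { int a, b, c, d; } s = { 1 };

   only the first field is given explicitly, so the whole object is
   cleared first and just the single nonzero field is stored.  */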
4943 else if (((list_length (CONSTRUCTOR_ELTS (exp)) != fields_length (type))
4944 || mostly_zeros_p (exp))
4945 && (GET_CODE (target) != REG
4946 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4947 == size)))
4949 rtx xtarget = target;
4951 if (readonly_fields_p (type))
4953 xtarget = copy_rtx (xtarget);
4954 RTX_UNCHANGING_P (xtarget) = 1;
4957 clear_storage (xtarget, GEN_INT (size));
4962 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4964 /* Store each element of the constructor into
4965 the corresponding field of TARGET. */
4967 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4969 tree field = TREE_PURPOSE (elt);
4970 tree value = TREE_VALUE (elt);
4971 enum machine_mode mode;
4972 HOST_WIDE_INT bitsize;
4973 HOST_WIDE_INT bitpos = 0;
4975 rtx to_rtx = target;
4977 /* Just ignore missing fields.
4978 We cleared the whole structure, above,
4979 if any fields are missing. */
4983 if (cleared && is_zeros_p (value))
4986 if (host_integerp (DECL_SIZE (field), 1))
4987 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4991 mode = DECL_MODE (field);
4992 if (DECL_BIT_FIELD (field))
4995 offset = DECL_FIELD_OFFSET (field);
4996 if (host_integerp (offset, 0)
4997 && host_integerp (bit_position (field), 0))
4999 bitpos = int_bit_position (field);
5003 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
5009 if (CONTAINS_PLACEHOLDER_P (offset))
5010 offset = build (WITH_RECORD_EXPR, sizetype,
5011 offset, make_tree (TREE_TYPE (exp), target));
5013 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
5014 if (GET_CODE (to_rtx) != MEM)
5017 #ifdef POINTERS_EXTEND_UNSIGNED
5018 if (GET_MODE (offset_rtx) != Pmode)
5019 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
5021 if (GET_MODE (offset_rtx) != ptr_mode)
5022 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
5025 to_rtx = offset_address (to_rtx, offset_rtx,
5026 highest_pow2_factor (offset));
5029 if (TREE_READONLY (field))
5031 if (GET_CODE (to_rtx) == MEM)
5032 to_rtx = copy_rtx (to_rtx);
5034 RTX_UNCHANGING_P (to_rtx) = 1;
5037 #ifdef WORD_REGISTER_OPERATIONS
5038 /* If this initializes a field that is smaller than a word, at the
5039 start of a word, try to widen it to a full word.
5040 This special case allows us to output C++ member function
5041 initializations in a form that the optimizers can understand. */
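/* Illustrative example (editor's sketch, hypothetical source): for

     struct S { short x; short y; };
     struct S s = { 1, 0 };

   built in a register on a 32-bit target, the initializer of X starts
   at bit 0 of a word and is narrower than a word, so it is widened to
   a full-word constant store.  */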
5042 if (GET_CODE (target) == REG
5043 && bitsize < BITS_PER_WORD
5044 && bitpos % BITS_PER_WORD == 0
5045 && GET_MODE_CLASS (mode) == MODE_INT
5046 && TREE_CODE (value) == INTEGER_CST
5048 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5050 tree type = TREE_TYPE (value);
5052 if (TYPE_PRECISION (type) < BITS_PER_WORD)
5054 type = (*lang_hooks.types.type_for_size)
5055 (BITS_PER_WORD, TREE_UNSIGNED (type));
5056 value = convert (type, value);
5059 if (BYTES_BIG_ENDIAN)
5060 value
5061 = fold (build (LSHIFT_EXPR, type, value,
5062 build_int_2 (BITS_PER_WORD - bitsize, 0)));
5063 bitsize = BITS_PER_WORD;
5068 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5069 && DECL_NONADDRESSABLE_P (field))
5071 to_rtx = copy_rtx (to_rtx);
5072 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5075 store_constructor_field (to_rtx, bitsize, bitpos, mode,
5076 value, type, cleared,
5077 get_alias_set (TREE_TYPE (field)));
5080 else if (TREE_CODE (type) == ARRAY_TYPE
5081 || TREE_CODE (type) == VECTOR_TYPE)
5086 tree domain = TYPE_DOMAIN (type);
5087 tree elttype = TREE_TYPE (type);
5089 HOST_WIDE_INT minelt = 0;
5090 HOST_WIDE_INT maxelt = 0;
5092 /* Vectors are like arrays, but the domain is stored via an array
5093 type indirectly.  */
5094 if (TREE_CODE (type) == VECTOR_TYPE)
5096 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
5097 the same field as TYPE_DOMAIN, we are not guaranteed that
5098 it always will.  */
5099 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
5100 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
5103 const_bounds_p = (TYPE_MIN_VALUE (domain)
5104 && TYPE_MAX_VALUE (domain)
5105 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5106 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5108 /* If we have constant bounds for the range of the type, get them. */
5111 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5112 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5115 /* If the constructor has fewer elements than the array,
5116 clear the whole array first. Similarly if this is
5117 a static constructor of a non-BLKmode object.  */
5118 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
5122 HOST_WIDE_INT count = 0, zero_count = 0;
5123 need_to_clear = ! const_bounds_p;
5125 /* This loop is a more accurate version of the loop in
5126 mostly_zeros_p (it handles RANGE_EXPR in an index).
5127 It is also needed to check for missing elements. */
5128 for (elt = CONSTRUCTOR_ELTS (exp);
5129 elt != NULL_TREE && ! need_to_clear;
5130 elt = TREE_CHAIN (elt))
5132 tree index = TREE_PURPOSE (elt);
5133 HOST_WIDE_INT this_node_count;
5135 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5137 tree lo_index = TREE_OPERAND (index, 0);
5138 tree hi_index = TREE_OPERAND (index, 1);
5140 if (! host_integerp (lo_index, 1)
5141 || ! host_integerp (hi_index, 1))
5147 this_node_count = (tree_low_cst (hi_index, 1)
5148 - tree_low_cst (lo_index, 1) + 1);
5151 this_node_count = 1;
5153 count += this_node_count;
5154 if (mostly_zeros_p (TREE_VALUE (elt)))
5155 zero_count += this_node_count;
5158 /* Clear the entire array first if there are any missing elements,
5159 or if the incidence of zero elements is >= 75%. */
5160 if (! need_to_clear
5161 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
5162 need_to_clear = 1;
5165 if (need_to_clear && size > 0)
5170 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5172 clear_storage (target, GEN_INT (size));
5176 else if (REG_P (target))
5177 /* Inform later passes that the old value is dead. */
5178 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
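/* Worked example for the clearing heuristic above (editor's note):
   for a 100-element constructor with 80 zero elements,
   4 * 80 >= 3 * 100 holds, so the whole array is cleared first and
   only the 20 nonzero elements are stored individually.  */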
5180 /* Store each element of the constructor into
5181 the corresponding element of TARGET, determined
5182 by counting the elements. */
5183 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
5185 elt = TREE_CHAIN (elt), i++)
5187 enum machine_mode mode;
5188 HOST_WIDE_INT bitsize;
5189 HOST_WIDE_INT bitpos;
5191 tree value = TREE_VALUE (elt);
5192 tree index = TREE_PURPOSE (elt);
5193 rtx xtarget = target;
5195 if (cleared && is_zeros_p (value))
5198 unsignedp = TREE_UNSIGNED (elttype);
5199 mode = TYPE_MODE (elttype);
5200 if (mode == BLKmode)
5201 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5202 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5205 bitsize = GET_MODE_BITSIZE (mode);
5207 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5209 tree lo_index = TREE_OPERAND (index, 0);
5210 tree hi_index = TREE_OPERAND (index, 1);
5211 rtx index_r, pos_rtx, loop_end;
5212 struct nesting *loop;
5213 HOST_WIDE_INT lo, hi, count;
5216 /* If the range is constant and "small", unroll the loop. */
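/* Illustrative example (editor's sketch): a GNU C range initializer
   such as

     int a[4] = { [0 ... 3] = 7 };

   produces a RANGE_EXPR index; with constant bounds and a small total
   size the stores are emitted directly here instead of via a loop.  */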
5217 if (const_bounds_p
5218 && host_integerp (lo_index, 0)
5219 && host_integerp (hi_index, 0)
5220 && (lo = tree_low_cst (lo_index, 0),
5221 hi = tree_low_cst (hi_index, 0),
5222 count = hi - lo + 1,
5223 (GET_CODE (target) != MEM
5225 || (host_integerp (TYPE_SIZE (elttype), 1)
5226 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5229 lo -= minelt; hi -= minelt;
5230 for (; lo <= hi; lo++)
5232 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5234 if (GET_CODE (target) == MEM
5235 && !MEM_KEEP_ALIAS_SET_P (target)
5236 && TREE_CODE (type) == ARRAY_TYPE
5237 && TYPE_NONALIASED_COMPONENT (type))
5239 target = copy_rtx (target);
5240 MEM_KEEP_ALIAS_SET_P (target) = 1;
5243 store_constructor_field
5244 (target, bitsize, bitpos, mode, value, type, cleared,
5245 get_alias_set (elttype));
5250 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
5251 loop_end = gen_label_rtx ();
5253 unsignedp = TREE_UNSIGNED (domain);
5255 index = build_decl (VAR_DECL, NULL_TREE, domain);
5258 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5260 SET_DECL_RTL (index, index_r);
5261 if (TREE_CODE (value) == SAVE_EXPR
5262 && SAVE_EXPR_RTL (value) == 0)
5264 /* Make sure value gets expanded once before the
5265 loop.  */
5266 expand_expr (value, const0_rtx, VOIDmode, 0);
5269 store_expr (lo_index, index_r, 0);
5270 loop = expand_start_loop (0);
5272 /* Assign value to element index. */
5273 position
5274 = convert (ssizetype,
5275 fold (build (MINUS_EXPR, TREE_TYPE (index),
5276 index, TYPE_MIN_VALUE (domain))));
5277 position = size_binop (MULT_EXPR, position,
5278 convert (ssizetype,
5279 TYPE_SIZE_UNIT (elttype)));
5281 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
5282 xtarget = offset_address (target, pos_rtx,
5283 highest_pow2_factor (position));
5284 xtarget = adjust_address (xtarget, mode, 0);
5285 if (TREE_CODE (value) == CONSTRUCTOR)
5286 store_constructor (value, xtarget, cleared,
5287 bitsize / BITS_PER_UNIT);
5289 store_expr (value, xtarget, 0);
5291 expand_exit_loop_if_false (loop,
5292 build (LT_EXPR, integer_type_node,
5295 expand_increment (build (PREINCREMENT_EXPR,
5297 index, integer_one_node), 0, 0);
5299 emit_label (loop_end);
5302 else if ((index != 0 && ! host_integerp (index, 0))
5303 || ! host_integerp (TYPE_SIZE (elttype), 1))
5308 index = ssize_int (1);
5311 index = convert (ssizetype,
5312 fold (build (MINUS_EXPR, index,
5313 TYPE_MIN_VALUE (domain))));
5315 position = size_binop (MULT_EXPR, index,
5316 convert (ssizetype,
5317 TYPE_SIZE_UNIT (elttype)));
5318 xtarget = offset_address (target,
5319 expand_expr (position, 0, VOIDmode, 0),
5320 highest_pow2_factor (position));
5321 xtarget = adjust_address (xtarget, mode, 0);
5322 store_expr (value, xtarget, 0);
5327 bitpos = ((tree_low_cst (index, 0) - minelt)
5328 * tree_low_cst (TYPE_SIZE (elttype), 1));
5330 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5332 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
5333 && TREE_CODE (type) == ARRAY_TYPE
5334 && TYPE_NONALIASED_COMPONENT (type))
5336 target = copy_rtx (target);
5337 MEM_KEEP_ALIAS_SET_P (target) = 1;
5340 store_constructor_field (target, bitsize, bitpos, mode, value,
5341 type, cleared, get_alias_set (elttype));
5347 /* Set constructor assignments. */
5348 else if (TREE_CODE (type) == SET_TYPE)
5350 tree elt = CONSTRUCTOR_ELTS (exp);
5351 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
5352 tree domain = TYPE_DOMAIN (type);
5353 tree domain_min, domain_max, bitlength;
5355 /* The default implementation strategy is to extract the constant
5356 parts of the constructor, use that to initialize the target,
5357 and then "or" in whatever non-constant ranges we need in addition.
5359 If a large set is all zero or all ones, it is
5360 probably better to set it using memset (if available) or bzero.
5361 Also, if a large set has just a single range, it may also be
5362 better to first clear the whole set (using
5363 bzero/memset), and then set the bits we want.  */
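/* Illustrative example (editor's sketch): for a Pascal-style
   SET OF 0..63 initialized to [1, 3..10], the constant bits are
   assembled word by word below, while any range with bounds known only
   at run time would be set afterwards through setbits_libfunc.  */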
5365 /* Check for all zeros. */
5366 if (elt == NULL_TREE && size > 0)
5369 clear_storage (target, GEN_INT (size));
5373 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
5374 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
5375 bitlength = size_binop (PLUS_EXPR,
5376 size_diffop (domain_max, domain_min),
5379 nbits = tree_low_cst (bitlength, 1);
5381 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
5382 are "complicated" (more than one range), initialize (the
5383 constant parts) by copying from a constant. */
5384 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
5385 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
5387 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
5388 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
5389 char *bit_buffer = alloca (nbits);
5390 HOST_WIDE_INT word = 0;
5391 unsigned int bit_pos = 0;
5392 unsigned int ibit = 0;
5393 unsigned int offset = 0; /* In bytes from beginning of set. */
5395 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
5398 if (bit_buffer[ibit])
5400 if (BYTES_BIG_ENDIAN)
5401 word |= (1 << (set_word_size - 1 - bit_pos));
5403 word |= 1 << bit_pos;
5407 if (bit_pos >= set_word_size || ibit == nbits)
5409 if (word != 0 || ! cleared)
5411 rtx datum = GEN_INT (word);
5414 /* The assumption here is that it is safe to use
5415 XEXP if the set is multi-word, but not if
5416 it's single-word. */
5417 if (GET_CODE (target) == MEM)
5418 to_rtx = adjust_address (target, mode, offset);
5419 else if (offset == 0)
5423 emit_move_insn (to_rtx, datum);
5430 offset += set_word_size / BITS_PER_UNIT;
5435 /* Don't bother clearing storage if the set is all ones. */
5436 if (TREE_CHAIN (elt) != NULL_TREE
5437 || (TREE_PURPOSE (elt) == NULL_TREE
5439 : ( ! host_integerp (TREE_VALUE (elt), 0)
5440 || ! host_integerp (TREE_PURPOSE (elt), 0)
5441 || (tree_low_cst (TREE_VALUE (elt), 0)
5442 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5443 != (HOST_WIDE_INT) nbits))))
5444 clear_storage (target, expr_size (exp));
5446 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5448 /* Start of range of element or NULL. */
5449 tree startbit = TREE_PURPOSE (elt);
5450 /* End of range of element, or element value. */
5451 tree endbit = TREE_VALUE (elt);
5452 HOST_WIDE_INT startb, endb;
5453 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5455 bitlength_rtx = expand_expr (bitlength,
5456 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5458 /* Handle non-range tuple element like [ expr ]. */
5459 if (startbit == NULL_TREE)
5461 startbit = save_expr (endbit);
5462 endbit = startbit;
5465 startbit = convert (sizetype, startbit);
5466 endbit = convert (sizetype, endbit);
5467 if (! integer_zerop (domain_min))
5469 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5470 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5472 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5473 EXPAND_CONST_ADDRESS);
5474 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5475 EXPAND_CONST_ADDRESS);
5481 ((build_qualified_type ((*lang_hooks.types.type_for_mode)
5482 (GET_MODE (target), 0),
5485 emit_move_insn (targetx, target);
5488 else if (GET_CODE (target) == MEM)
5493 /* Optimization: If startbit and endbit are constants divisible
5494 by BITS_PER_UNIT, call memset instead. */
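/* Worked example (editor's note): with BITS_PER_UNIT == 8, a range
   covering bits 8..31 gives startb == 8 and endb == 32, both divisible
   by 8, so the three bytes at offset 1 are set by a single memset of
   -1 instead of a bit-setting library call.  */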
5495 if (TARGET_MEM_FUNCTIONS
5496 && TREE_CODE (startbit) == INTEGER_CST
5497 && TREE_CODE (endbit) == INTEGER_CST
5498 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5499 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5501 emit_library_call (memset_libfunc, LCT_NORMAL,
5502 VOIDmode, 3,
5503 plus_constant (XEXP (targetx, 0),
5504 startb / BITS_PER_UNIT),
5505 Pmode,
5506 constm1_rtx, TYPE_MODE (integer_type_node),
5507 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5508 TYPE_MODE (sizetype));
5511 emit_library_call (setbits_libfunc, LCT_NORMAL,
5512 VOIDmode, 4, XEXP (targetx, 0),
5513 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5514 startbit_rtx, TYPE_MODE (sizetype),
5515 endbit_rtx, TYPE_MODE (sizetype));
5518 emit_move_insn (target, targetx);
5526 /* Store the value of EXP (an expression tree)
5527 into a subfield of TARGET which has mode MODE and occupies
5528 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5529 If MODE is VOIDmode, it means that we are storing into a bit-field.
5531 If VALUE_MODE is VOIDmode, return nothing in particular.
5532 UNSIGNEDP is not used in this case.
5534 Otherwise, return an rtx for the value stored. This rtx
5535 has mode VALUE_MODE if that is convenient to do.
5536 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5538 TYPE is the type of the underlying object,
5540 ALIAS_SET is the alias set for the destination. This value will
5541 (in general) be different from that for TARGET, since TARGET is a
5542 reference to the containing structure. */
5545 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5546 enum machine_mode mode, tree exp, enum machine_mode value_mode,
5547 int unsignedp, tree type, int alias_set)
5549 HOST_WIDE_INT width_mask = 0;
5551 if (TREE_CODE (exp) == ERROR_MARK)
5554 /* If we have nothing to store, do nothing unless the expression has
5555 side-effects.  */
5556 if (bitsize == 0)
5557 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5558 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5559 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5561 /* If we are storing into an unaligned field of an aligned union that is
5562 in a register, we may have the mode of TARGET being an integer mode but
5563 MODE == BLKmode. In that case, get an aligned object whose size and
5564 alignment are the same as TARGET and store TARGET into it (we can avoid
5565 the store if the field being stored is the entire width of TARGET). Then
5566 call ourselves recursively to store the field into a BLKmode version of
5567 that object. Finally, load from the object into TARGET. This is not
5568 very efficient in general, but should only be slightly more expensive
5569 than the otherwise-required unaligned accesses. Perhaps this can be
5570 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5571 twice, once with emit_move_insn and once via store_field. */
5573 if (mode == BLKmode
5574 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5576 rtx object = assign_temp (type, 0, 1, 1);
5577 rtx blk_object = adjust_address (object, BLKmode, 0);
5579 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5580 emit_move_insn (object, target);
5582 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5585 emit_move_insn (target, object);
5587 /* We want to return the BLKmode version of the data. */
5591 if (GET_CODE (target) == CONCAT)
5593 /* We're storing into a struct containing a single __complex. */
5597 return store_expr (exp, target, 0);
5600 /* If the structure is in a register or if the component
5601 is a bit field, we cannot use addressing to access it.
5602 Use bit-field techniques or SUBREG to store in it. */
5604 if (mode == VOIDmode
5605 || (mode != BLKmode && ! direct_store[(int) mode]
5606 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5607 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5608 || GET_CODE (target) == REG
5609 || GET_CODE (target) == SUBREG
5610 /* If the field isn't aligned enough to store as an ordinary memref,
5611 store it as a bit field. */
5613 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5614 || bitpos % GET_MODE_ALIGNMENT (mode))
5615 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5616 || (bitpos % BITS_PER_UNIT != 0)))
5617 /* If the RHS and field are a constant size and the size of the
5618 RHS isn't the same size as the bitfield, we must use bitfield
5619 operations.  */
5620 || (bitsize >= 0
5621 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5622 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5624 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5626 /* If BITSIZE is narrower than the size of the type of EXP
5627 we will be narrowing TEMP. Normally, what's wanted are the
5628 low-order bits.  However, if EXP's type is a record and this is a
5629 big-endian machine, we want the upper BITSIZE bits.  */
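/* Worked example (editor's note): storing an 8-bit field from a
   32-bit SImode TEMP on a big-endian target shifts TEMP right by
   32 - 8 == 24, moving the wanted upper bits down to the low-order
   position.  */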
5630 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5631 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5632 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5633 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5634 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5638 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5639 MODE.  */
5640 if (mode != VOIDmode && mode != BLKmode
5641 && mode != TYPE_MODE (TREE_TYPE (exp)))
5642 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5644 /* If the modes of TARGET and TEMP are both BLKmode, both
5645 must be in memory and BITPOS must be aligned on a byte
5646 boundary. If so, we simply do a block copy. */
5647 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5649 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5650 || bitpos % BITS_PER_UNIT != 0)
5653 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5654 emit_block_move (target, temp,
5655 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5659 return value_mode == VOIDmode ? const0_rtx : target;
5662 /* Store the value in the bitfield. */
5663 store_bit_field (target, bitsize, bitpos, mode, temp,
5664 int_size_in_bytes (type));
5666 if (value_mode != VOIDmode)
5668 /* The caller wants an rtx for the value.
5669 If possible, avoid refetching from the bitfield itself. */
5670 if (width_mask != 0
5671 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5674 enum machine_mode tmode;
5676 tmode = GET_MODE (temp);
5677 if (tmode == VOIDmode)
5681 return expand_and (tmode, temp,
5682 gen_int_mode (width_mask, tmode),
5685 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5686 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5687 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5690 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5691 NULL_RTX, value_mode, VOIDmode,
5692 int_size_in_bytes (type));
5698 rtx addr = XEXP (target, 0);
5699 rtx to_rtx = target;
5701 /* If a value is wanted, it must be the lhs;
5702 so make the address stable for multiple use. */
5704 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5705 && ! CONSTANT_ADDRESS_P (addr)
5706 /* A frame-pointer reference is already stable. */
5707 && ! (GET_CODE (addr) == PLUS
5708 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5709 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5710 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5711 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5713 /* Now build a reference to just the desired component. */
5715 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5717 if (to_rtx == target)
5718 to_rtx = copy_rtx (to_rtx);
5720 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5721 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5722 set_mem_alias_set (to_rtx, alias_set);
5724 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5728 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5729 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5730 codes and find the ultimate containing object, which we return.
5732 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5733 bit position, and *PUNSIGNEDP to the signedness of the field.
5734 If the position of the field is variable, we store a tree
5735 giving the variable offset (in units) in *POFFSET.
5736 This offset is in addition to the bit position.
5737 If the position is not variable, we store 0 in *POFFSET.
5739 If any of the extraction expressions is volatile,
5740 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5742 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5743 is a mode that can be used to access the field. In that case, *PBITSIZE
5746 If the field describes a variable-sized object, *PMODE is set to
5747 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5748 this case, but the address of the object can be found. */
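/* Illustrative example (editor's sketch, hypothetical source): for the
   reference s.a.b[3] with constant offsets, this returns the innermost
   containing object S, sets *PBITPOS to the constant bit offset of the
   element within S, sets *POFFSET to 0, and describes the element's
   size and mode in *PBITSIZE and *PMODE.  */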
5751 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5752 HOST_WIDE_INT *pbitpos, tree *poffset,
5753 enum machine_mode *pmode, int *punsignedp,
5757 enum machine_mode mode = VOIDmode;
5758 tree offset = size_zero_node;
5759 tree bit_offset = bitsize_zero_node;
5760 tree placeholder_ptr = 0;
5763 /* First get the mode, signedness, and size. We do this from just the
5764 outermost expression. */
5765 if (TREE_CODE (exp) == COMPONENT_REF)
5767 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5768 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5769 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5771 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5773 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5775 size_tree = TREE_OPERAND (exp, 1);
5776 *punsignedp = TREE_UNSIGNED (exp);
5780 mode = TYPE_MODE (TREE_TYPE (exp));
5781 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5783 if (mode == BLKmode)
5784 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5786 *pbitsize = GET_MODE_BITSIZE (mode);
5791 if (! host_integerp (size_tree, 1))
5792 mode = BLKmode, *pbitsize = -1;
5794 *pbitsize = tree_low_cst (size_tree, 1);
5797 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5798 and find the ultimate containing object. */
5801 if (TREE_CODE (exp) == BIT_FIELD_REF)
5802 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5803 else if (TREE_CODE (exp) == COMPONENT_REF)
5805 tree field = TREE_OPERAND (exp, 1);
5806 tree this_offset = DECL_FIELD_OFFSET (field);
5808 /* If this field hasn't been filled in yet, don't go
5809 past it. This should only happen when folding expressions
5810 made during type construction. */
5811 if (this_offset == 0)
5813 else if (CONTAINS_PLACEHOLDER_P (this_offset))
5814 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5816 offset = size_binop (PLUS_EXPR, offset, this_offset);
5817 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5818 DECL_FIELD_BIT_OFFSET (field));
5820 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5823 else if (TREE_CODE (exp) == ARRAY_REF
5824 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5826 tree index = TREE_OPERAND (exp, 1);
5827 tree array = TREE_OPERAND (exp, 0);
5828 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5829 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5830 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5832 /* We assume all arrays have sizes that are a multiple of a byte.
5833 First subtract the lower bound, if any, in the type of the
5834 index, then convert to sizetype and multiply by the size of the
5835 element.  */
5836 if (low_bound != 0 && ! integer_zerop (low_bound))
5837 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5840 /* If the index has a self-referential type, pass it to a
5841 WITH_RECORD_EXPR; if the component size does, pass our
5842 component to one.  */
5843 if (CONTAINS_PLACEHOLDER_P (index))
5844 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5845 if (CONTAINS_PLACEHOLDER_P (unit_size))
5846 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5848 offset = size_binop (PLUS_EXPR, offset,
5849 size_binop (MULT_EXPR,
5850 convert (sizetype, index),
5854 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5856 tree new = find_placeholder (exp, &placeholder_ptr);
5858 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5859 We might have been called from tree optimization where we
5860 haven't set up an object yet. */
5869 /* We can go inside most conversions: all NON_LVALUE_EXPRs, all normal
5870 conversions that don't change the mode, and all view conversions
5871 except those that need to "step up" the alignment. */
5872 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5873 && ! (TREE_CODE (exp) == VIEW_CONVERT_EXPR
5874 && ! ((TYPE_ALIGN (TREE_TYPE (exp))
5875 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5877 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5878 < BIGGEST_ALIGNMENT)
5879 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5880 || TYPE_ALIGN_OK (TREE_TYPE
5881 (TREE_OPERAND (exp, 0))))))
5882 && ! ((TREE_CODE (exp) == NOP_EXPR
5883 || TREE_CODE (exp) == CONVERT_EXPR)
5884 && (TYPE_MODE (TREE_TYPE (exp))
5885 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5888 /* If any reference in the chain is volatile, the effect is volatile. */
5889 if (TREE_THIS_VOLATILE (exp))
5892 exp = TREE_OPERAND (exp, 0);
5895 /* If OFFSET is constant, see if we can return the whole thing as a
5896 constant bit position. Otherwise, split it up. */
5897 if (host_integerp (offset, 0)
5898 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5900 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5901 && host_integerp (tem, 0))
5902 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5904 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5910 /* Return 1 if T is an expression that get_inner_reference handles. */
5913 handled_component_p (tree t)
5915 switch (TREE_CODE (t))
5920 case ARRAY_RANGE_REF:
5921 case NON_LVALUE_EXPR:
5922 case VIEW_CONVERT_EXPR:
5925 /* ??? Sure they are handled, but get_inner_reference may return
5926 a different PBITSIZE, depending upon whether the expression is
5927 wrapped up in a NOP_EXPR or not, e.g. for bitfields. */
5930 return (TYPE_MODE (TREE_TYPE (t))
5931 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5938 /* Given an rtx VALUE that may contain additions and multiplications, return
5939 an equivalent value that just refers to a register, memory, or constant.
5940 This is done by generating instructions to perform the arithmetic and
5941 returning a pseudo-register containing the value.
5943 The returned value may be a REG, SUBREG, MEM or constant. */
5946 force_operand (rtx value, rtx target)
5949 /* Use subtarget as the target for operand 0 of a binary operation. */
5950 rtx subtarget = get_subtarget (target);
5951 enum rtx_code code = GET_CODE (value);
5953 /* Check for a PIC address load. */
5954 if ((code == PLUS || code == MINUS)
5955 && XEXP (value, 0) == pic_offset_table_rtx
5956 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5957 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5958 || GET_CODE (XEXP (value, 1)) == CONST))
5961 subtarget = gen_reg_rtx (GET_MODE (value));
5962 emit_move_insn (subtarget, value);
5966 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5969 target = gen_reg_rtx (GET_MODE (value));
5970 convert_move (target, force_operand (XEXP (value, 0), NULL),
5971 code == ZERO_EXTEND);
5975 if (GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
5977 op2 = XEXP (value, 1);
5978 if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
5980 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5983 op2 = negate_rtx (GET_MODE (value), op2);
5986 /* Check for an addition with OP2 a constant integer and our first
5987 operand a PLUS of a virtual register and something else. In that
5988 case, we want to emit the sum of the virtual register and the
5989 constant first and then add the other value. This allows virtual
5990 register instantiation to simply modify the constant rather than
5991 creating another one around this addition. */
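/* Worked example (editor's note): for

     (plus (plus (reg virtual-stack-vars) (reg R)) (const_int 8))

   the sum (plus (reg virtual-stack-vars) (const_int 8)) is emitted
   first, so instantiation can later fold the frame offset into that
   constant instead of materializing a second addition.  */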
5992 if (code == PLUS && GET_CODE (op2) == CONST_INT
5993 && GET_CODE (XEXP (value, 0)) == PLUS
5994 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5995 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5996 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5998 rtx temp = expand_simple_binop (GET_MODE (value), code,
5999 XEXP (XEXP (value, 0), 0), op2,
6000 subtarget, 0, OPTAB_LIB_WIDEN);
6001 return expand_simple_binop (GET_MODE (value), code, temp,
6002 force_operand (XEXP (XEXP (value,
6004 target, 0, OPTAB_LIB_WIDEN);
6007 op1 = force_operand (XEXP (value, 0), subtarget);
6008 op2 = force_operand (op2, NULL_RTX);
6012 return expand_mult (GET_MODE (value), op1, op2, target, 1);
6014 if (!INTEGRAL_MODE_P (GET_MODE (value)))
6015 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6016 target, 1, OPTAB_LIB_WIDEN);
6018 return expand_divmod (0,
6019 FLOAT_MODE_P (GET_MODE (value))
6020 ? RDIV_EXPR : TRUNC_DIV_EXPR,
6021 GET_MODE (value), op1, op2, target, 0);
6024 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6028 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
6032 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6036 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6037 target, 0, OPTAB_LIB_WIDEN);
6040 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6041 target, 1, OPTAB_LIB_WIDEN);
6044 if (GET_RTX_CLASS (code) == '1')
6046 op1 = force_operand (XEXP (value, 0), NULL_RTX);
6047 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
6050 #ifdef INSN_SCHEDULING
6051 /* On machines that have insn scheduling, we want all memory references to be
6052 explicit, so we need to deal with such paradoxical SUBREGs. */
6053 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
6054 && (GET_MODE_SIZE (GET_MODE (value))
6055 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
6056 value
6057 = simplify_gen_subreg (GET_MODE (value),
6058 force_reg (GET_MODE (SUBREG_REG (value)),
6059 force_operand (SUBREG_REG (value),
6061 GET_MODE (SUBREG_REG (value)),
6062 SUBREG_BYTE (value));
6068 /* Subroutine of expand_expr: return nonzero iff there is no way that
6069 EXP can reference X, which is being modified. TOP_P is nonzero if this
6070 call is going to be used to determine whether we need a temporary
6071 for EXP, as opposed to a recursive call to this function.
6073 It is always safe for this routine to return zero since it merely
6074 searches for optimization opportunities. */
6077 safe_from_p (rtx x, tree exp, int top_p)
6081 static tree save_expr_list;
6084 /* If EXP has varying size, we MUST use a target since we currently
6085 have no way of allocating temporaries of variable size
6086 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6087 So we assume here that something at a higher level has prevented a
6088 clash. This is somewhat bogus, but the best we can do. Only
6089 do this when X is BLKmode and when we are at the top level. */
6090 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6091 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
6092 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
6093 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
6094 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
6096 && GET_MODE (x) == BLKmode)
6097 /* If X is in the outgoing argument area, it is always safe. */
6098 || (GET_CODE (x) == MEM
6099 && (XEXP (x, 0) == virtual_outgoing_args_rtx
6100 || (GET_CODE (XEXP (x, 0)) == PLUS
6101 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
6104 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
6105 find the underlying pseudo. */
6106 if (GET_CODE (x) == SUBREG)
6109 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6113 /* A SAVE_EXPR might appear many times in the expression passed to the
6114 top-level safe_from_p call, and if it has a complex subexpression,
6115 examining it multiple times could result in a combinatorial explosion.
6116 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
6117 with optimization took about 28 minutes to compile -- even though it was
6118 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
6119 and turn that off when we are done. We keep a list of the SAVE_EXPRs
6120 we have processed. Note that the only test of top_p was above. */
6129 rtn = safe_from_p (x, exp, 0);
6131 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
6132 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
6137 /* Now look at our tree code and possibly recurse. */
6138 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6141 exp_rtl = DECL_RTL_IF_SET (exp);
6148 if (TREE_CODE (exp) == TREE_LIST)
6152 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
6154 exp = TREE_CHAIN (exp);
6157 if (TREE_CODE (exp) != TREE_LIST)
6158 return safe_from_p (x, exp, 0);
6161 else if (TREE_CODE (exp) == ERROR_MARK)
6162 return 1; /* An already-visited SAVE_EXPR? */
6168 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6173 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6177 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6178 the expression. If it is set, we conflict iff we are that rtx or
6179 both are in memory. Otherwise, we check all operands of the
6180 expression recursively. */
6182 switch (TREE_CODE (exp))
6185 /* If the operand is static or we are static, we can't conflict.
6186 Likewise if we don't conflict with the operand at all. */
6187 if (staticp (TREE_OPERAND (exp, 0))
6188 || TREE_STATIC (exp)
6189 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6192 /* Otherwise, the only way this can conflict is if we are taking
6193 the address of a DECL whose address is part of X, which is
6194 very rare.  */
6195 exp = TREE_OPERAND (exp, 0);
6198 if (!DECL_RTL_SET_P (exp)
6199 || GET_CODE (DECL_RTL (exp)) != MEM)
6202 exp_rtl = XEXP (DECL_RTL (exp), 0);
6207 if (GET_CODE (x) == MEM
6208 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6209 get_alias_set (exp)))
6214 /* Assume that the call will clobber all hard registers and
6215 all of memory.  */
6216 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6217 || GET_CODE (x) == MEM)
6222 /* If a sequence exists, we would have to scan every instruction
6223 in the sequence to see if it was safe.  This is probably not
6224 worthwhile.  */
6225 if (RTL_EXPR_SEQUENCE (exp))
6228 exp_rtl = RTL_EXPR_RTL (exp);
6231 case WITH_CLEANUP_EXPR:
6232 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
6235 case CLEANUP_POINT_EXPR:
6236 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6239 exp_rtl = SAVE_EXPR_RTL (exp);
6243 /* If we've already scanned this, don't do it again. Otherwise,
6244 show we've scanned it and record for clearing the flag if we're
6245 going on.  */
6246 if (TREE_PRIVATE (exp))
6249 TREE_PRIVATE (exp) = 1;
6250 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6252 TREE_PRIVATE (exp) = 0;
6256 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
6260 /* The only operand we look at is operand 1. The rest aren't
6261 part of the expression. */
6262 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
6268 /* If we have an rtx, we do not need to scan our operands. */
6272 nops = first_rtl_op (TREE_CODE (exp));
6273 for (i = 0; i < nops; i++)
6274 if (TREE_OPERAND (exp, i) != 0
6275 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6278 /* If this is a language-specific tree code, it may require
6279 special handling. */
6280 if ((unsigned int) TREE_CODE (exp)
6281 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
6282 && !(*lang_hooks.safe_from_p) (x, exp))
6286 /* If we have an rtl, find any enclosed object.  Then see if we conflict
6287 with it.  */
6290 if (GET_CODE (exp_rtl) == SUBREG)
6292 exp_rtl = SUBREG_REG (exp_rtl);
6293 if (GET_CODE (exp_rtl) == REG
6294 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6298 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6299 are memory and they conflict. */
6300 return ! (rtx_equal_p (x, exp_rtl)
6301 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
6302 && true_dependence (exp_rtl, VOIDmode, x,
6303 rtx_addr_varies_p)));
6306 /* If we reach here, it is safe. */
6310 /* Subroutine of expand_expr: return rtx if EXP is a
6311 variable or parameter; else return 0. */
6317 switch (TREE_CODE (exp))
6321 return DECL_RTL (exp);
6327 #ifdef MAX_INTEGER_COMPUTATION_MODE
6330 check_max_integer_computation_mode (tree exp)
6332 enum tree_code code;
6333 enum machine_mode mode;
6335 /* Strip any NOPs that don't change the mode.  */
6336 STRIP_NOPS (exp);
6337 code = TREE_CODE (exp);
6339 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
6340 if (code == NOP_EXPR
6341 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
6344 /* First check the type of the overall operation. We need only look at
6345 unary, binary and relational operations. */
6346 if (TREE_CODE_CLASS (code) == '1'
6347 || TREE_CODE_CLASS (code) == '2'
6348 || TREE_CODE_CLASS (code) == '<')
6350 mode = TYPE_MODE (TREE_TYPE (exp));
6351 if (GET_MODE_CLASS (mode) == MODE_INT
6352 && mode > MAX_INTEGER_COMPUTATION_MODE)
6353 internal_error ("unsupported wide integer operation");
6356 /* Check operand of a unary op. */
6357 if (TREE_CODE_CLASS (code) == '1')
6359 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6360 if (GET_MODE_CLASS (mode) == MODE_INT
6361 && mode > MAX_INTEGER_COMPUTATION_MODE)
6362 internal_error ("unsupported wide integer operation");
6365 /* Check operands of a binary/comparison op. */
6366 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
6368 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6369 if (GET_MODE_CLASS (mode) == MODE_INT
6370 && mode > MAX_INTEGER_COMPUTATION_MODE)
6371 internal_error ("unsupported wide integer operation");
6373 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
6374 if (GET_MODE_CLASS (mode) == MODE_INT
6375 && mode > MAX_INTEGER_COMPUTATION_MODE)
6376 internal_error ("unsupported wide integer operation");
6381 /* Return the highest power of two that EXP is known to be a multiple of.
6382 This is used in updating alignment of MEMs in array references. */
6384 static unsigned HOST_WIDE_INT
6385 highest_pow2_factor (tree exp)
6387 unsigned HOST_WIDE_INT c0, c1;
6389 switch (TREE_CODE (exp))
6392 /* We can find the lowest bit that's a one. If the low
6393 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6394 We need to handle this case since we can find it in a COND_EXPR,
6395 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6396 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6397 later ICE.  */
6398 if (TREE_CONSTANT_OVERFLOW (exp))
6399 return BIGGEST_ALIGNMENT;
6402 /* Note: tree_low_cst is intentionally not used here,
6403 we don't care about the upper bits. */
6404 c0 = TREE_INT_CST_LOW (exp);
6405 c0 &= -c0;  /* Isolate the lowest set bit.  */
6406 return c0 ? c0 : BIGGEST_ALIGNMENT;
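/* Worked example (an illustration, not original text): for the constant
   24 (binary 11000), c0 &= -c0 leaves 8, the lowest set bit, so any
   multiple of 24 is known to be a multiple of 8.  For a constant whose
   low HOST_BITS_PER_WIDE_INT bits are all zero, c0 becomes 0 and we
   fall back to BIGGEST_ALIGNMENT.  */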
6410 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6411 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6412 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6413 return MIN (c0, c1);
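/* E.g. (hypothetical) for A*8 + B*12 the operand factors are 8 and 4,
   and the sum is only guaranteed to be a multiple of MIN (8, 4) = 4.  */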
6416 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6417 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6418 return MIN (BIGGEST_ALIGNMENT, c0 * c1);
6420 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6422 if (integer_pow2p (TREE_OPERAND (exp, 1))
6423 && host_integerp (TREE_OPERAND (exp, 1), 1))
6425 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6426 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6427 return MAX (1, c0 / c1);
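/* E.g. (hypothetical) for (X * 16) / 4 the dividend's factor is 16 and
   the divisor is 4, so the quotient is a multiple of MAX (1, 16/4) = 4;
   the MAX guards against a divisor larger than the known factor.  */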
6431 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6432 case SAVE_EXPR: case WITH_RECORD_EXPR:
6433 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6436 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6439 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6440 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6441 return MIN (c0, c1);
6450 /* Similar, except that it is known that the expression must be a multiple
6451 of the alignment of TYPE. */
6453 static unsigned HOST_WIDE_INT
6454 highest_pow2_factor_for_type (tree type, tree exp)
6456 unsigned HOST_WIDE_INT type_align, factor;
6458 factor = highest_pow2_factor (exp);
6459 type_align = TYPE_ALIGN (type) / BITS_PER_UNIT;
6460 return MAX (factor, type_align);
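/* Sketch of the intent (an assumption, not original text): if EXP only
   factors as 2 but TYPE is a double with 8-byte alignment, the result
   is MAX (2, 8) = 8, since EXP is separately known to be a multiple of
   the type's alignment.  */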
6463 /* Return an object on the placeholder list that matches EXP, a
6464 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
6465 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
6466 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
6467 is a location which initially points to a starting location in the
6468 placeholder list (zero means start of the list) and where a pointer into
6469 the placeholder list at which the object is found is placed. */
6472 find_placeholder (tree exp, tree *plist)
6474 tree type = TREE_TYPE (exp);
6475 tree placeholder_expr;
6477 for (placeholder_expr
6478 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
6479 placeholder_expr != 0;
6480 placeholder_expr = TREE_CHAIN (placeholder_expr))
6482 tree need_type = TYPE_MAIN_VARIANT (type);
6485 /* Find the outermost reference that is of the type we want. If none,
6486 see if any object has a type that is a pointer to the type we
6487 want.  */
6488 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6489 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6490 || TREE_CODE (elt) == COND_EXPR)
6491 ? TREE_OPERAND (elt, 1)
6492 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6493 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6494 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6495 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6496 ? TREE_OPERAND (elt, 0) : 0))
6497 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6500 *plist = placeholder_expr;
6504 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6506 = ((TREE_CODE (elt) == COMPOUND_EXPR
6507 || TREE_CODE (elt) == COND_EXPR)
6508 ? TREE_OPERAND (elt, 1)
6509 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6510 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6511 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6512 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6513 ? TREE_OPERAND (elt, 0) : 0))
6514 if (POINTER_TYPE_P (TREE_TYPE (elt))
6515 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6519 *plist = placeholder_expr;
6520 return build1 (INDIRECT_REF, need_type, elt);
6527 /* Subroutine of expand_expr. Expand the two operands of a binary
6528 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6529 The value may be stored in TARGET if TARGET is nonzero. The
6530 MODIFIER argument is as documented by expand_expr. */
6533 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6534 enum expand_modifier modifier)
6536 if (! safe_from_p (target, exp1, 1))
6537 target = 0;
6538 if (operand_equal_p (exp0, exp1, 0))
6540 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6541 *op1 = copy_rtx (*op0);
6545 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6546 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
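/* Design note (an observation, not original text): when EXP0 and EXP1
   are structurally identical -- e.g. both operands of x * x -- expanding
   once and duplicating the rtx with copy_rtx avoids emitting the same
   computation twice.  */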
6551 /* expand_expr: generate code for computing expression EXP.
6552 An rtx for the computed value is returned. The value is never null.
6553 In the case of a void EXP, const0_rtx is returned.
6555 The value may be stored in TARGET if TARGET is nonzero.
6556 TARGET is just a suggestion; callers must assume that
6557 the rtx returned may not be the same as TARGET.
6559 If TARGET is CONST0_RTX, it means that the value will be ignored.
6561 If TMODE is not VOIDmode, it suggests generating the
6562 result in mode TMODE. But this is done only when convenient.
6563 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6564 TMODE is just a suggestion; callers must assume that
6565 the rtx returned may not have mode TMODE.
6567 Note that TARGET may have neither TMODE nor MODE. In that case, it
6568 probably will not be used.
6570 If MODIFIER is EXPAND_SUM then when EXP is an addition
6571 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6572 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6573 products as above, or REG or MEM, or constant.
6574 Ordinarily in such cases we would output mul or add instructions
6575 and then return a pseudo reg containing the sum.
6577 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6578 it also marks a label as absolutely required (it can't be dead).
6579 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6580 This is used for outputting expressions used in initializers.
6582 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6583 with a constant address even if that address is not normally legitimate.
6584 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6586 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6587 a call parameter. Such targets require special care as we haven't yet
6588 marked TARGET so that it's safe from being trashed by libcalls. We
6589 don't want to use TARGET for anything but the final result;
6590 intermediate values must go elsewhere.  Additionally, calls to
6591 emit_block_move will be flagged with BLOCK_OP_CALL_PARM. */
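/* A hedged example of the EXPAND_SUM shape (hypothetical): expanding
   the address arithmetic for a[i*4] may come back as

	(plus (mult (reg i) (const_int 4)) (symbol_ref a))

   rather than as a pseudo holding the completed sum, leaving the
   caller free to fold the whole thing into an addressing mode.  */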
6594 expand_expr (tree exp, rtx target, enum machine_mode tmode,
6595 enum expand_modifier modifier)
6598 tree type = TREE_TYPE (exp);
6599 int unsignedp = TREE_UNSIGNED (type);
6600 enum machine_mode mode;
6601 enum tree_code code = TREE_CODE (exp);
6603 rtx subtarget, original_target;
6607 /* Handle ERROR_MARK before anybody tries to access its type. */
6608 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6610 op0 = CONST0_RTX (tmode);
6616 mode = TYPE_MODE (type);
6617 /* Use subtarget as the target for operand 0 of a binary operation. */
6618 subtarget = get_subtarget (target);
6619 original_target = target;
6620 ignore = (target == const0_rtx
6621 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6622 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6623 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6624 && TREE_CODE (type) == VOID_TYPE));
6626 /* If we are going to ignore this result, we need only do something
6627 if there is a side-effect somewhere in the expression. If there
6628 is, short-circuit the most common cases here. Note that we must
6629 not call expand_expr with anything but const0_rtx in case this
6630 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6634 if (! TREE_SIDE_EFFECTS (exp))
6637 /* Ensure we reference a volatile object even if value is ignored, but
6638 don't do this if all we are doing is taking its address. */
6639 if (TREE_THIS_VOLATILE (exp)
6640 && TREE_CODE (exp) != FUNCTION_DECL
6641 && mode != VOIDmode && mode != BLKmode
6642 && modifier != EXPAND_CONST_ADDRESS)
6644 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6645 if (GET_CODE (temp) == MEM)
6646 temp = copy_to_reg (temp);
6650 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6651 || code == INDIRECT_REF || code == BUFFER_REF)
6652 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6655 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6656 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6658 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6659 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6662 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6663 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6664 /* If the second operand has no side effects, just evaluate
6665 the first.  */
6666 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6668 else if (code == BIT_FIELD_REF)
6670 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6671 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6672 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6679 #ifdef MAX_INTEGER_COMPUTATION_MODE
6680 /* Only check stuff here if the mode we want is different from the mode
6681 of the expression; if it's the same, check_max_integer_computation_mode
6682 will handle it. Do we really need to check this stuff at all? */
6685 && GET_MODE (target) != mode
6686 && TREE_CODE (exp) != INTEGER_CST
6687 && TREE_CODE (exp) != PARM_DECL
6688 && TREE_CODE (exp) != ARRAY_REF
6689 && TREE_CODE (exp) != ARRAY_RANGE_REF
6690 && TREE_CODE (exp) != COMPONENT_REF
6691 && TREE_CODE (exp) != BIT_FIELD_REF
6692 && TREE_CODE (exp) != INDIRECT_REF
6693 && TREE_CODE (exp) != CALL_EXPR
6694 && TREE_CODE (exp) != VAR_DECL
6695 && TREE_CODE (exp) != RTL_EXPR)
6697 enum machine_mode mode = GET_MODE (target);
6699 if (GET_MODE_CLASS (mode) == MODE_INT
6700 && mode > MAX_INTEGER_COMPUTATION_MODE)
6701 internal_error ("unsupported wide integer operation");
6705 && TREE_CODE (exp) != INTEGER_CST
6706 && TREE_CODE (exp) != PARM_DECL
6707 && TREE_CODE (exp) != ARRAY_REF
6708 && TREE_CODE (exp) != ARRAY_RANGE_REF
6709 && TREE_CODE (exp) != COMPONENT_REF
6710 && TREE_CODE (exp) != BIT_FIELD_REF
6711 && TREE_CODE (exp) != INDIRECT_REF
6712 && TREE_CODE (exp) != VAR_DECL
6713 && TREE_CODE (exp) != CALL_EXPR
6714 && TREE_CODE (exp) != RTL_EXPR
6715 && GET_MODE_CLASS (tmode) == MODE_INT
6716 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6717 internal_error ("unsupported wide integer operation");
6719 check_max_integer_computation_mode (exp);
6722 /* If will do cse, generate all results into pseudo registers
6723 since 1) that allows cse to find more things
6724 and 2) otherwise cse could produce an insn the machine
6725 cannot support. An exception is a CONSTRUCTOR into a multi-word
6726 MEM: that's much more likely to be most efficient into the MEM.
6727 Another is a CALL_EXPR which must return in memory. */
6729 if (! cse_not_expected && mode != BLKmode && target
6730 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6731 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
6732 && ! (code == CALL_EXPR && aggregate_value_p (exp, exp)))
6739 tree function = decl_function_context (exp);
6740 /* Labels in containing functions, or labels used from initializers,
6741 must be forced.  */
6742 if (modifier == EXPAND_INITIALIZER
6743 || (function != current_function_decl
6744 && function != inline_function_decl
6746 temp = force_label_rtx (exp);
6748 temp = label_rtx (exp);
6750 temp = gen_rtx_MEM (FUNCTION_MODE, gen_rtx_LABEL_REF (Pmode, temp));
6751 if (function != current_function_decl
6752 && function != inline_function_decl && function != 0)
6753 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6758 if (!DECL_RTL_SET_P (exp))
6760 error ("%Hprior parameter's size depends on '%D'",
6761 &DECL_SOURCE_LOCATION (exp), exp);
6762 return CONST0_RTX (mode);
6765 /* ... fall through ... */
6768 /* If a static var's type was incomplete when the decl was written,
6769 but the type is complete now, lay out the decl now. */
6770 if (DECL_SIZE (exp) == 0
6771 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6772 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6773 layout_decl (exp, 0);
6775 /* ... fall through ... */
6779 if (DECL_RTL (exp) == 0)
6782 /* Ensure variable marked as used even if it doesn't go through
6783 a parser.  If it hasn't been used yet, write out an external
6784 definition.  */
6785 if (! TREE_USED (exp))
6787 assemble_external (exp);
6788 TREE_USED (exp) = 1;
6791 /* Show we haven't gotten RTL for this yet. */
6794 /* Handle variables inherited from containing functions. */
6795 context = decl_function_context (exp);
6797 /* We treat inline_function_decl as an alias for the current function
6798 because that is the inline function whose vars, types, etc.
6799 are being merged into the current function.
6800 See expand_inline_function. */
6802 if (context != 0 && context != current_function_decl
6803 && context != inline_function_decl
6804 /* If var is static, we don't need a static chain to access it. */
6805 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6806 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6810 /* Mark as non-local and addressable. */
6811 DECL_NONLOCAL (exp) = 1;
6812 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6814 (*lang_hooks.mark_addressable) (exp);
6815 if (GET_CODE (DECL_RTL (exp)) != MEM)
6817 addr = XEXP (DECL_RTL (exp), 0);
6818 if (GET_CODE (addr) == MEM)
6820 = replace_equiv_address (addr,
6821 fix_lexical_addr (XEXP (addr, 0), exp));
6823 addr = fix_lexical_addr (addr, exp);
6825 temp = replace_equiv_address (DECL_RTL (exp), addr);
6828 /* This is the case of an array whose size is to be determined
6829 from its initializer, while the initializer is still being parsed.
6830 See expand_decl.  */
6832 else if (GET_CODE (DECL_RTL (exp)) == MEM
6833 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6834 temp = validize_mem (DECL_RTL (exp));
6836 /* If DECL_RTL is memory, we are in the normal case and either
6837 the address is not valid or it is not a register and -fforce-addr
6838 is specified, get the address into a register. */
6840 else if (GET_CODE (DECL_RTL (exp)) == MEM
6841 && modifier != EXPAND_CONST_ADDRESS
6842 && modifier != EXPAND_SUM
6843 && modifier != EXPAND_INITIALIZER
6844 && (! memory_address_p (DECL_MODE (exp),
6845 XEXP (DECL_RTL (exp), 0))
6847 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6848 temp = replace_equiv_address (DECL_RTL (exp),
6849 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6851 /* If we got something, return it. But first, set the alignment
6852 if the address is a register. */
6855 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6856 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6861 /* If the mode of DECL_RTL does not match that of the decl, it
6862 must be a promoted value. We return a SUBREG of the wanted mode,
6863 but mark it so that we know that it was already extended. */
6865 if (GET_CODE (DECL_RTL (exp)) == REG
6866 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6868 /* Get the signedness used for this variable. Ensure we get the
6869 same mode we got when the variable was declared. */
6870 if (GET_MODE (DECL_RTL (exp))
6871 != promote_mode (type, DECL_MODE (exp), &unsignedp,
6872 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6875 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6876 SUBREG_PROMOTED_VAR_P (temp) = 1;
6877 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6881 return DECL_RTL (exp);
6884 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6885 TREE_INT_CST_HIGH (exp), mode);
6887 /* ??? If overflow is set, fold will have done an incomplete job,
6888 which can result in (plus xx (const_int 0)), which can get
6889 simplified by validate_replace_rtx during virtual register
6890 instantiation, which can result in unrecognizable insns.
6891 Avoid this by forcing all overflows into registers. */
6892 if (TREE_CONSTANT_OVERFLOW (exp)
6893 && modifier != EXPAND_INITIALIZER)
6894 temp = force_reg (mode, temp);
6899 return const_vector_from_tree (exp);
6902 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6905 /* If optimized, generate immediate CONST_DOUBLE
6906 which will be turned into memory by reload if necessary.
6908 We used to force a register so that loop.c could see it. But
6909 this does not allow gen_* patterns to perform optimizations with
6910 the constants. It also produces two insns in cases like "x = 1.0;".
6911 On most machines, floating-point constants are not permitted in
6912 many insns, so we'd end up copying it to a register in any case.
6914 Now, we do the copying in expand_binop, if appropriate. */
6915 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6916 TYPE_MODE (TREE_TYPE (exp)));
6919 /* Handle evaluating a complex constant in a CONCAT target. */
6920 if (original_target && GET_CODE (original_target) == CONCAT)
6922 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6925 rtarg = XEXP (original_target, 0);
6926 itarg = XEXP (original_target, 1);
6928 /* Move the real and imaginary parts separately. */
6929 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
6930 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
6933 emit_move_insn (rtarg, op0);
6935 emit_move_insn (itarg, op1);
6937 return original_target;
6940 /* ... fall through ... */
6943 temp = output_constant_def (exp, 1);
6945 /* temp contains a constant address.
6946 On RISC machines where a constant address isn't valid,
6947 make some insns to get that address into a register. */
6948 if (modifier != EXPAND_CONST_ADDRESS
6949 && modifier != EXPAND_INITIALIZER
6950 && modifier != EXPAND_SUM
6951 && (! memory_address_p (mode, XEXP (temp, 0))
6952 || flag_force_addr))
6953 return replace_equiv_address (temp,
6954 copy_rtx (XEXP (temp, 0)));
6957 case EXPR_WITH_FILE_LOCATION:
6960 struct file_stack fs;
6962 fs.location = input_location;
6963 fs.next = expr_wfl_stack;
6964 input_filename = EXPR_WFL_FILENAME (exp);
6965 input_line = EXPR_WFL_LINENO (exp);
6966 expr_wfl_stack = &fs;
6967 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6968 emit_line_note (input_location);
6969 /* Possibly avoid switching back and forth here. */
6970 to_return = expand_expr (EXPR_WFL_NODE (exp),
6971 (ignore ? const0_rtx : target),
6973 if (expr_wfl_stack != &fs)
6975 input_location = fs.location;
6976 expr_wfl_stack = fs.next;
6981 context = decl_function_context (exp);
6983 /* If this SAVE_EXPR was at global context, assume we are an
6984 initialization function and move it into our context.  */
6985 if (SAVE_EXPR_CONTEXT (exp) == 0)
6986 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6988 /* We treat inline_function_decl as an alias for the current function
6989 because that is the inline function whose vars, types, etc.
6990 are being merged into the current function.
6991 See expand_inline_function. */
6992 if (context == current_function_decl || context == inline_function_decl)
6995 /* If this is non-local, handle it. */
6998 /* The following call just exists to abort if the context is
6999 not of a containing function. */
7000 find_function_data (context);
7002 temp = SAVE_EXPR_RTL (exp);
7003 if (temp && GET_CODE (temp) == REG)
7005 put_var_into_stack (exp, /*rescan=*/true);
7006 temp = SAVE_EXPR_RTL (exp);
7008 if (temp == 0 || GET_CODE (temp) != MEM)
7011 replace_equiv_address (temp,
7012 fix_lexical_addr (XEXP (temp, 0), exp));
7014 if (SAVE_EXPR_RTL (exp) == 0)
7016 if (mode == VOIDmode)
7019 temp = assign_temp (build_qualified_type (type,
7021 | TYPE_QUAL_CONST)),
7024 SAVE_EXPR_RTL (exp) = temp;
7025 if (!optimize && GET_CODE (temp) == REG)
7026 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
7029 /* If the mode of TEMP does not match that of the expression, it
7030 must be a promoted value. We pass store_expr a SUBREG of the
7031 wanted mode but mark it so that we know that it was already
7034 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
7036 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
7037 promote_mode (type, mode, &unsignedp, 0);
7038 SUBREG_PROMOTED_VAR_P (temp) = 1;
7039 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
7042 if (temp == const0_rtx)
7043 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7045 store_expr (TREE_OPERAND (exp, 0), temp,
7046 modifier == EXPAND_STACK_PARM ? 2 : 0);
7048 TREE_USED (exp) = 1;
7051 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
7052 must be a promoted value. We return a SUBREG of the wanted mode,
7053 but mark it so that we know that it was already extended. */
7055 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
7056 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
7058 /* Compute the signedness and make the proper SUBREG. */
7059 promote_mode (type, mode, &unsignedp, 0);
7060 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
7061 SUBREG_PROMOTED_VAR_P (temp) = 1;
7062 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
7066 return SAVE_EXPR_RTL (exp);
7071 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7072 TREE_OPERAND (exp, 0)
7073 = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
7077 case PLACEHOLDER_EXPR:
7079 tree old_list = placeholder_list;
7080 tree placeholder_expr = 0;
7082 exp = find_placeholder (exp, &placeholder_expr);
7086 placeholder_list = TREE_CHAIN (placeholder_expr);
7087 temp = expand_expr (exp, original_target, tmode, modifier);
7088 placeholder_list = old_list;
7092 case WITH_RECORD_EXPR:
7093 /* Put the object on the placeholder list, expand our first operand,
7094 and pop the list. */
7095 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
7097 target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
7099 placeholder_list = TREE_CHAIN (placeholder_list);
7103 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
7104 expand_goto (TREE_OPERAND (exp, 0));
7106 expand_computed_goto (TREE_OPERAND (exp, 0));
7110 expand_exit_loop_if_false (NULL,
7111 invert_truthvalue (TREE_OPERAND (exp, 0)));
7114 case LABELED_BLOCK_EXPR:
7115 if (LABELED_BLOCK_BODY (exp))
7116 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
7117 /* Should perhaps use expand_label, but this is simpler and safer. */
7118 do_pending_stack_adjust ();
7119 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
7122 case EXIT_BLOCK_EXPR:
7123 if (EXIT_BLOCK_RETURN (exp))
7124 sorry ("returned value in block_exit_expr");
7125 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
7130 expand_start_loop (1);
7131 expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
7139 tree vars = TREE_OPERAND (exp, 0);
7141 /* Need to open a binding contour here because
7142 if there are any cleanups they must be contained here. */
7143 expand_start_bindings (2);
7145 /* Mark the corresponding BLOCK for output in its proper place. */
7146 if (TREE_OPERAND (exp, 2) != 0
7147 && ! TREE_USED (TREE_OPERAND (exp, 2)))
7148 (*lang_hooks.decls.insert_block) (TREE_OPERAND (exp, 2));
7150 /* If VARS have not yet been expanded, expand them now. */
7153 if (!DECL_RTL_SET_P (vars))
7155 expand_decl_init (vars);
7156 vars = TREE_CHAIN (vars);
7159 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
7161 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
7167 if (RTL_EXPR_SEQUENCE (exp))
7169 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
7171 emit_insn (RTL_EXPR_SEQUENCE (exp));
7172 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
7174 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
7175 free_temps_for_rtl_expr (exp);
7176 return RTL_EXPR_RTL (exp);
7179 /* If we don't need the result, just ensure we evaluate any
7180 subexpressions.  */
7185 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
7186 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
7191 /* All elts simple constants => refer to a constant in memory. But
7192 if this is a non-BLKmode mode, let it store a field at a time
7193 since that should make a CONST_INT or CONST_DOUBLE when we
7194 fold. Likewise, if we have a target we can use, it is best to
7195 store directly into the target unless the type is large enough
7196 that memcpy will be used. If we are making an initializer and
7197 all operands are constant, put it in memory as well.
7199 FIXME: Avoid trying to fill vector constructors piece-meal.
7200 Output them with output_constant_def below unless we're sure
7201 they're zeros. This should go away when vector initializers
7202 are treated like VECTOR_CST instead of arrays.
7204 else if ((TREE_STATIC (exp)
7205 && ((mode == BLKmode
7206 && ! (target != 0 && safe_from_p (target, exp, 1)))
7207 || TREE_ADDRESSABLE (exp)
7208 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7209 && (! MOVE_BY_PIECES_P
7210 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7212 && ((TREE_CODE (type) == VECTOR_TYPE
7213 && !is_zeros_p (exp))
7214 || ! mostly_zeros_p (exp)))))
7215 || ((modifier == EXPAND_INITIALIZER
7216 || modifier == EXPAND_CONST_ADDRESS)
7217 && TREE_CONSTANT (exp)))
7219 rtx constructor = output_constant_def (exp, 1);
7221 if (modifier != EXPAND_CONST_ADDRESS
7222 && modifier != EXPAND_INITIALIZER
7223 && modifier != EXPAND_SUM)
7224 constructor = validize_mem (constructor);
7230 /* Handle calls that pass values in multiple non-contiguous
7231 locations. The Irix 6 ABI has examples of this. */
7232 if (target == 0 || ! safe_from_p (target, exp, 1)
7233 || GET_CODE (target) == PARALLEL
7234 || modifier == EXPAND_STACK_PARM)
7236 = assign_temp (build_qualified_type (type,
7238 | (TREE_READONLY (exp)
7239 * TYPE_QUAL_CONST))),
7240 0, TREE_ADDRESSABLE (exp), 1);
7242 store_constructor (exp, target, 0, int_expr_size (exp));
7248 tree exp1 = TREE_OPERAND (exp, 0);
7250 tree string = string_constant (exp1, &index);
7252 /* Try to optimize reads from const strings. */
7254 && TREE_CODE (string) == STRING_CST
7255 && TREE_CODE (index) == INTEGER_CST
7256 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
7257 && GET_MODE_CLASS (mode) == MODE_INT
7258 && GET_MODE_SIZE (mode) == 1
7259 && modifier != EXPAND_WRITE)
7260 return gen_int_mode (TREE_STRING_POINTER (string)
7261 [TREE_INT_CST_LOW (index)], mode);
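/* E.g. (illustrative) a QImode read of *("abc" + 1) satisfies every
   test above and folds directly to (const_int 98), the code of 'b',
   with no memory reference emitted.  */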
7263 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
7264 op0 = memory_address (mode, op0);
7265 temp = gen_rtx_MEM (mode, op0);
7266 set_mem_attributes (temp, exp, 0);
7268 /* If we are writing to this object and its type is a record with
7269 readonly fields, we must mark it as readonly so it will
7270 conflict with readonly references to those fields. */
7271 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
7272 RTX_UNCHANGING_P (temp) = 1;
7278 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
7282 tree array = TREE_OPERAND (exp, 0);
7283 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
7284 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
7285 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
7288 /* Optimize the special-case of a zero lower bound.
7290 We convert the low_bound to sizetype to avoid some problems
7291 with constant folding. (E.g. suppose the lower bound is 1,
7292 and its mode is QI. Without the conversion, (ARRAY
7293 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
7294 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
7296 if (! integer_zerop (low_bound))
7297 index = size_diffop (index, convert (sizetype, low_bound));
7299 /* Fold an expression like: "foo"[2].
7300 This is not done in fold so it won't happen inside &.
7301 Don't fold if this is for wide characters since it's too
7302 difficult to do correctly and this is a very rare case. */
7304 if (modifier != EXPAND_CONST_ADDRESS
7305 && modifier != EXPAND_INITIALIZER
7306 && modifier != EXPAND_MEMORY
7307 && TREE_CODE (array) == STRING_CST
7308 && TREE_CODE (index) == INTEGER_CST
7309 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
7310 && GET_MODE_CLASS (mode) == MODE_INT
7311 && GET_MODE_SIZE (mode) == 1)
7312 return gen_int_mode (TREE_STRING_POINTER (array)
7313 [TREE_INT_CST_LOW (index)], mode);
7315 /* If this is a constant index into a constant array,
7316 just get the value from the array. Handle both the cases when
7317 we have an explicit constructor and when our operand is a variable
7318 that was declared const. */
7320 if (modifier != EXPAND_CONST_ADDRESS
7321 && modifier != EXPAND_INITIALIZER
7322 && modifier != EXPAND_MEMORY
7323 && TREE_CODE (array) == CONSTRUCTOR
7324 && ! TREE_SIDE_EFFECTS (array)
7325 && TREE_CODE (index) == INTEGER_CST
7326 && 0 > compare_tree_int (index,
7327 list_length (CONSTRUCTOR_ELTS
7328 (TREE_OPERAND (exp, 0)))))
7332 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7333 i = TREE_INT_CST_LOW (index);
7334 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
7338 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
7342 else if (optimize >= 1
7343 && modifier != EXPAND_CONST_ADDRESS
7344 && modifier != EXPAND_INITIALIZER
7345 && modifier != EXPAND_MEMORY
7346 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7347 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7348 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
7350 if (TREE_CODE (index) == INTEGER_CST)
7352 tree init = DECL_INITIAL (array);
7354 if (TREE_CODE (init) == CONSTRUCTOR)
7358 for (elem = CONSTRUCTOR_ELTS (init);
7360 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
7361 elem = TREE_CHAIN (elem))
7364 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
7365 return expand_expr (fold (TREE_VALUE (elem)), target,
7368 else if (TREE_CODE (init) == STRING_CST
7369 && 0 > compare_tree_int (index,
7370 TREE_STRING_LENGTH (init)))
7372 tree type = TREE_TYPE (TREE_TYPE (init));
7373 enum machine_mode mode = TYPE_MODE (type);
7375 if (GET_MODE_CLASS (mode) == MODE_INT
7376 && GET_MODE_SIZE (mode) == 1)
7377 return gen_int_mode (TREE_STRING_POINTER (init)
7378 [TREE_INT_CST_LOW (index)], mode);
7383 goto normal_inner_ref;
7386 /* If the operand is a CONSTRUCTOR, we can just extract the
7387 appropriate field if it is present. */
7388 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
7392 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
7393 elt = TREE_CHAIN (elt))
7394 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
7395 /* We can normally use the value of the field in the
7396 CONSTRUCTOR. However, if this is a bitfield in
7397 an integral mode that we can fit in a HOST_WIDE_INT,
7398 we must mask only the number of bits in the bitfield,
7399 since this is done implicitly by the constructor. If
7400 the bitfield does not meet either of those conditions,
7401 we can't do this optimization. */
7402 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
7403 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
7405 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
7406 <= HOST_BITS_PER_WIDE_INT))))
7408 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
7409 && modifier == EXPAND_STACK_PARM)
7411 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
7412 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
7414 HOST_WIDE_INT bitsize
7415 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
7416 enum machine_mode imode
7417 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
7419 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
7421 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7422 op0 = expand_and (imode, op0, op1, target);
7427 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
7430 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7432 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7440 goto normal_inner_ref;
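/* Worked example for the bitfield adjustment just above (an assumption,
   not original text): a 5-bit field extracted from SImode (32 bits) is
   masked with (1 << 5) - 1 = 0x1f when unsigned; when signed it is
   shifted left by 32 - 5 = 27 and arithmetically right by 27 so that
   bit 4 is replicated as the sign.  */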
7443 case ARRAY_RANGE_REF:
7446 enum machine_mode mode1;
7447 HOST_WIDE_INT bitsize, bitpos;
7450 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7451 &mode1, &unsignedp, &volatilep);
7454 /* If we got back the original object, something is wrong. Perhaps
7455 we are evaluating an expression too early. In any event, don't
7456 infinitely recurse. */
7460 /* If TEM's type is a union of variable size, pass TARGET to the inner
7461 computation, since it will need a temporary and TARGET is known
7462 to suffice.  This occurs in unchecked conversion in Ada.  */
7466 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7467 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7469 && modifier != EXPAND_STACK_PARM
7470 ? target : NULL_RTX),
7472 (modifier == EXPAND_INITIALIZER
7473 || modifier == EXPAND_CONST_ADDRESS
7474 || modifier == EXPAND_STACK_PARM)
7475 ? modifier : EXPAND_NORMAL);
7477 /* If this is a constant, put it into a register if it is a
7478 legitimate constant and OFFSET is 0 and memory if it isn't. */
7479 if (CONSTANT_P (op0))
7481 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7482 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7484 op0 = force_reg (mode, op0);
7486 op0 = validize_mem (force_const_mem (mode, op0));
7489 /* Otherwise, if this object is not in memory and we either have an
7490 offset or a BLKmode result, put it there. This case can't occur in
7491 C, but can in Ada if we have unchecked conversion of an expression
7492 from a scalar type to an array or record type or for an
7493 ARRAY_RANGE_REF whose type is BLKmode. */
7494 else if (GET_CODE (op0) != MEM
7496 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7498 /* If the operand is a SAVE_EXPR, we can deal with this by
7499 forcing the SAVE_EXPR into memory. */
7500 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7502 put_var_into_stack (TREE_OPERAND (exp, 0),
7504 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7509 = build_qualified_type (TREE_TYPE (tem),
7510 (TYPE_QUALS (TREE_TYPE (tem))
7511 | TYPE_QUAL_CONST));
7512 rtx memloc = assign_temp (nt, 1, 1, 1);
7514 emit_move_insn (memloc, op0);
7521 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7524 if (GET_CODE (op0) != MEM)
7527 #ifdef POINTERS_EXTEND_UNSIGNED
7528 if (GET_MODE (offset_rtx) != Pmode)
7529 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7531 if (GET_MODE (offset_rtx) != ptr_mode)
7532 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7535 /* A constant address in OP0 can have VOIDmode; we must not try
7536 to call force_reg in that case, so avoid it.  */
7537 if (GET_CODE (op0) == MEM
7538 && GET_MODE (op0) == BLKmode
7539 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7541 && (bitpos % bitsize) == 0
7542 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7543 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7545 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7549 op0 = offset_address (op0, offset_rtx,
7550 highest_pow2_factor (offset));
7553 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7554 record its alignment as BIGGEST_ALIGNMENT. */
7555 if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
7556 && is_aligning_offset (offset, tem))
7557 set_mem_align (op0, BIGGEST_ALIGNMENT);
7559 /* Don't forget about volatility even if this is a bitfield. */
7560 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7562 if (op0 == orig_op0)
7563 op0 = copy_rtx (op0);
7565 MEM_VOLATILE_P (op0) = 1;
7568 /* The following code doesn't handle CONCAT.
7569 Assume only bitpos == 0 can be used for CONCAT, due to
7570 one-element arrays having the same mode as their element.  */
7571 if (GET_CODE (op0) == CONCAT)
7573 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
7578 /* In cases where an aligned union has an unaligned object
7579 as a field, we might be extracting a BLKmode value from
7580 an integer-mode (e.g., SImode) object. Handle this case
7581 by doing the extract into an object as wide as the field
7582 (which we know to be the width of a basic mode), then
7583 storing into memory, and changing the mode to BLKmode. */
7584 if (mode1 == VOIDmode
7585 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7586 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7587 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7588 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7589 && modifier != EXPAND_CONST_ADDRESS
7590 && modifier != EXPAND_INITIALIZER)
7591 /* If the field isn't aligned enough to fetch as a memref,
7592 fetch it as a bit field. */
7593 || (mode1 != BLKmode
7594 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7595 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))
7596 && ((modifier == EXPAND_CONST_ADDRESS
7597 || modifier == EXPAND_INITIALIZER)
7599 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7600 || (bitpos % BITS_PER_UNIT != 0)))
7601 /* If the type and the field are a constant size and the
7602 size of the type isn't the same size as the bitfield,
7603 we must use bitfield operations. */
7605 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7607 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7610 enum machine_mode ext_mode = mode;
7612 if (ext_mode == BLKmode
7613 && ! (target != 0 && GET_CODE (op0) == MEM
7614 && GET_CODE (target) == MEM
7615 && bitpos % BITS_PER_UNIT == 0))
7616 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7618 if (ext_mode == BLKmode)
7620 /* In this case, BITPOS must start at a byte boundary and
7621 TARGET, if specified, must be a MEM. */
7622 if (GET_CODE (op0) != MEM
7623 || (target != 0 && GET_CODE (target) != MEM)
7624 || bitpos % BITS_PER_UNIT != 0)
7627 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
7629 target = assign_temp (type, 0, 1, 1);
7631 emit_block_move (target, op0,
7632 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7634 (modifier == EXPAND_STACK_PARM
7635 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7640 op0 = validize_mem (op0);
7642 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7643 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7645 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7646 (modifier == EXPAND_STACK_PARM
7647 ? NULL_RTX : target),
7649 int_size_in_bytes (TREE_TYPE (tem)));
7651 /* If the result is a record type and BITSIZE is narrower than
7652 the mode of OP0, an integral mode, and this is a big endian
7653 machine, we must put the field into the high-order bits. */
7654 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7655 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7656 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7657 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7658 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7662 if (mode == BLKmode)
7664 rtx new = assign_temp (build_qualified_type
7665 ((*lang_hooks.types.type_for_mode)
7667 TYPE_QUAL_CONST), 0, 1, 1);
7669 emit_move_insn (new, op0);
7670 op0 = copy_rtx (new);
7671 PUT_MODE (op0, BLKmode);
7672 set_mem_attributes (op0, exp, 1);
7678 /* If the result is BLKmode, use that to access the object
7680 if (mode == BLKmode)
7683 /* Get a reference to just this component. */
7684 if (modifier == EXPAND_CONST_ADDRESS
7685 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7686 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7688 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7690 if (op0 == orig_op0)
7691 op0 = copy_rtx (op0);
7693 set_mem_attributes (op0, exp, 0);
7694 if (GET_CODE (XEXP (op0, 0)) == REG)
7695 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7697 MEM_VOLATILE_P (op0) |= volatilep;
7698 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7699 || modifier == EXPAND_CONST_ADDRESS
7700 || modifier == EXPAND_INITIALIZER)
7702 else if (target == 0)
7703 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7705 convert_move (target, op0, unsignedp);
7711 rtx insn, before = get_last_insn (), vtbl_ref;
7713 /* Evaluate the interior expression. */
7714 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7717 /* Get or create an instruction off which to hang a note. */
7718 if (REG_P (subtarget))
7721 insn = get_last_insn ();
7724 if (! INSN_P (insn))
7725 insn = prev_nonnote_insn (insn);
7729 target = gen_reg_rtx (GET_MODE (subtarget));
7730 insn = emit_move_insn (target, subtarget);
7733 /* Collect the data for the note. */
7734 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7735 vtbl_ref = plus_constant (vtbl_ref,
7736 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7737 /* Discard the initial CONST that was added. */
7738 vtbl_ref = XEXP (vtbl_ref, 0);
7741 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7746 /* Intended for a reference to a buffer of a file-object in Pascal.
7747 But it's not certain that a special tree code will really be
7748 necessary for these. INDIRECT_REF might work for them. */
7754 /* Pascal set IN expression.
7757 rlo = set_low - (set_low%bits_per_word);
7758 the_word = set [ (index - rlo)/bits_per_word ];
7759 bit_index = index % bits_per_word;
7760 bitmask = 1 << bit_index;
7761 return !!(the_word & bitmask); */
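/* Worked example with hypothetical values: for bits_per_word == 8,
   set_low == 3 and index == 13: rlo = 0, the_word = set[1],
   bit_index = 5, bitmask = 0x20.  */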
7763 tree set = TREE_OPERAND (exp, 0);
7764 tree index = TREE_OPERAND (exp, 1);
7765 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7766 tree set_type = TREE_TYPE (set);
7767 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7768 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7769 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7770 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7771 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7772 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7773 rtx setaddr = XEXP (setval, 0);
7774 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7776 rtx diff, quo, rem, addr, bit, result;
7778 /* If domain is empty, answer is no. Likewise if index is constant
7779 and out of bounds. */
7780 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7781 && TREE_CODE (set_low_bound) == INTEGER_CST
7782 && tree_int_cst_lt (set_high_bound, set_low_bound))
7783 || (TREE_CODE (index) == INTEGER_CST
7784 && TREE_CODE (set_low_bound) == INTEGER_CST
7785 && tree_int_cst_lt (index, set_low_bound))
7786 || (TREE_CODE (set_high_bound) == INTEGER_CST
7787 && TREE_CODE (index) == INTEGER_CST
7788 && tree_int_cst_lt (set_high_bound, index))))
7792 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7794 /* If we get here, we have to generate the code for both cases
7795 (in range and out of range). */
7797 op0 = gen_label_rtx ();
7798 op1 = gen_label_rtx ();
7800 if (! (GET_CODE (index_val) == CONST_INT
7801 && GET_CODE (lo_r) == CONST_INT))
7802 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7803 GET_MODE (index_val), iunsignedp, op1);
7805 if (! (GET_CODE (index_val) == CONST_INT
7806 && GET_CODE (hi_r) == CONST_INT))
7807 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7808 GET_MODE (index_val), iunsignedp, op1);
7810 /* Calculate the element number of bit zero in the first word
7811 of the set.  */
7812 if (GET_CODE (lo_r) == CONST_INT)
7813 rlow = GEN_INT (INTVAL (lo_r)
7814 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7816 rlow = expand_binop (index_mode, and_optab, lo_r,
7817 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7818 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7820 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7821 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7823 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7824 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7825 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7826 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7828 addr = memory_address (byte_mode,
7829 expand_binop (index_mode, add_optab, diff,
7830 setaddr, NULL_RTX, iunsignedp,
7833 /* Extract the bit we want to examine. */
7834 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7835 gen_rtx_MEM (byte_mode, addr),
7836 make_tree (TREE_TYPE (index), rem),
7838 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7839 GET_MODE (target) == byte_mode ? target : 0,
7840 1, OPTAB_LIB_WIDEN);
7842 if (result != target)
7843 convert_move (target, result, 1);
7845 /* Output the code to handle the out-of-range case. */
7848 emit_move_insn (target, const0_rtx);
7853 case WITH_CLEANUP_EXPR:
7854 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7856 WITH_CLEANUP_EXPR_RTL (exp)
7857 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7858 expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
7859 CLEANUP_EH_ONLY (exp));
7861 /* That's it for this cleanup. */
7862 TREE_OPERAND (exp, 1) = 0;
7864 return WITH_CLEANUP_EXPR_RTL (exp);
7866 case CLEANUP_POINT_EXPR:
7868 /* Start a new binding layer that will keep track of all cleanup
7869 actions to be performed. */
7870 expand_start_bindings (2);
7872 target_temp_slot_level = temp_slot_level;
7874 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7875 /* If we're going to use this value, load it up now. */
7877 op0 = force_not_mem (op0);
7878 preserve_temp_slots (op0);
7879 expand_end_bindings (NULL_TREE, 0, 0);
7884 /* Check for a built-in function. */
7885 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7886 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7888 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7890 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7891 == BUILT_IN_FRONTEND)
7892 return (*lang_hooks.expand_expr) (exp, original_target,
7895 return expand_builtin (exp, target, subtarget, tmode, ignore);
7898 return expand_call (exp, target, ignore);
7900 case NON_LVALUE_EXPR:
7903 case REFERENCE_EXPR:
7904 if (TREE_OPERAND (exp, 0) == error_mark_node)
7907 if (TREE_CODE (type) == UNION_TYPE)
7909 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7911 /* If both input and output are BLKmode, this conversion isn't doing
7912 anything except possibly changing memory attributes.  */
7913 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7915 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7918 result = copy_rtx (result);
7919 set_mem_attributes (result, exp, 0);
7924 target = assign_temp (type, 0, 1, 1);
7926 if (GET_CODE (target) == MEM)
7927 /* Store data into beginning of memory target. */
7928 store_expr (TREE_OPERAND (exp, 0),
7929 adjust_address (target, TYPE_MODE (valtype), 0),
7930 modifier == EXPAND_STACK_PARM ? 2 : 0);
7932 else if (GET_CODE (target) == REG)
7933 /* Store this field into a union of the proper type. */
7934 store_field (target,
7935 MIN ((int_size_in_bytes (TREE_TYPE
7936 (TREE_OPERAND (exp, 0)))
7938 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7939 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7940 VOIDmode, 0, type, 0);
7944 /* Return the entire union. */
7948 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7950 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7953 /* If the signedness of the conversion differs and OP0 is
7954 a promoted SUBREG, clear that indication since we now
7955 have to do the proper extension. */
7956 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7957 && GET_CODE (op0) == SUBREG)
7958 SUBREG_PROMOTED_VAR_P (op0) = 0;
7963 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7964 if (GET_MODE (op0) == mode)
7967 /* If OP0 is a constant, just convert it into the proper mode. */
7968 if (CONSTANT_P (op0))
7970 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7971 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7973 if (modifier == EXPAND_INITIALIZER)
7974 return simplify_gen_subreg (mode, op0, inner_mode,
7975 subreg_lowpart_offset (mode,
7978 return convert_modes (mode, inner_mode, op0,
7979 TREE_UNSIGNED (inner_type));
7982 if (modifier == EXPAND_INITIALIZER)
7983 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7987 convert_to_mode (mode, op0,
7988 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7990 convert_move (target, op0,
7991 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7994 case VIEW_CONVERT_EXPR:
7995 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7997 /* If the input and output modes are both the same, we are done.
7998 Otherwise, if neither mode is BLKmode and both are integral and within
7999 a word, we can use gen_lowpart. If neither is true, make sure the
8000 operand is in memory and convert the MEM to the new mode. */
8001 if (TYPE_MODE (type) == GET_MODE (op0))
8003 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
8004 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
8005 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
8006 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
8007 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
8008 op0 = gen_lowpart (TYPE_MODE (type), op0);
8009 else if (GET_CODE (op0) != MEM)
8011 /* If the operand is not a MEM, force it into memory. Since we
8012 are going to be changing the mode of the MEM, don't call
8013 force_const_mem for constants because we don't allow pool
8014 constants to change mode. */
8015 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8017 if (TREE_ADDRESSABLE (exp))
8020 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
8022 = assign_stack_temp_for_type
8023 (TYPE_MODE (inner_type),
8024 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
8026 emit_move_insn (target, op0);
8030 /* At this point, OP0 is in the correct mode. If the output type is such
8031 that the operand is known to be aligned, indicate that it is.
8032 Otherwise, we need only be concerned about alignment for non-BLKmode
8033 results.  */
8034 if (GET_CODE (op0) == MEM)
8036 op0 = copy_rtx (op0);
8038 if (TYPE_ALIGN_OK (type))
8039 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
8040 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
8041 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
8043 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8044 HOST_WIDE_INT temp_size
8045 = MAX (int_size_in_bytes (inner_type),
8046 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
8047 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
8048 temp_size, 0, type);
8049 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
8051 if (TREE_ADDRESSABLE (exp))
8054 if (GET_MODE (op0) == BLKmode)
8055 emit_block_move (new_with_op0_mode, op0,
8056 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
8057 (modifier == EXPAND_STACK_PARM
8058 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
8060 emit_move_insn (new_with_op0_mode, op0);
8065 op0 = adjust_address (op0, TYPE_MODE (type), 0);
8071 this_optab = ! unsignedp && flag_trapv
8072 && (GET_MODE_CLASS (mode) == MODE_INT)
8073 ? addv_optab : add_optab;
8075 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
8076 something else, make sure we add the register to the constant and
8077 then to the other thing. This case can occur during strength
8078 reduction and doing it this way will produce better code if the
8079 frame pointer or argument pointer is eliminated.
8081 fold-const.c will ensure that the constant is always in the inner
8082 PLUS_EXPR, so the only case we need to do anything about is if
8083 sp, ap, or fp is our second argument, in which case we must swap
8084 the innermost first argument and our second argument. */
8086 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
8087 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
8088 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
8089 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
8090 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
8091 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
8093 tree t = TREE_OPERAND (exp, 1);
8095 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
8096 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
8099 /* If the result is to be ptr_mode and we are adding an integer to
8100 something, we might be forming a constant. So try to use
8101 plus_constant. If it produces a sum and we can't accept it,
8102 use force_operand. This allows P = &ARR[const] to generate
8103 efficient code on machines where a SYMBOL_REF is not a valid
8104 address.
8106 If this is an EXPAND_SUM call, always return the sum. */
8107 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8108 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8110 if (modifier == EXPAND_STACK_PARM)
8112 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
8113 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8114 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
8118 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
8120 /* Use immed_double_const to ensure that the constant is
8121 truncated according to the mode of OP1, then sign extended
8122 to a HOST_WIDE_INT. Using the constant directly can result
8123 in non-canonical RTL in a 64x32 cross compile. */
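/* E.g. (assumption) on a 64-bit host targeting a 32-bit machine, the
   SImode constant 0xffffffff must be represented as (const_int -1),
   the sign-extended canonical form, not as (const_int 0xffffffff).  */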
8125 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
8127 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
8128 op1 = plus_constant (op1, INTVAL (constant_part));
8129 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8130 op1 = force_operand (op1, target);
8134 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8135 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
8136 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
8140 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8141 (modifier == EXPAND_INITIALIZER
8142 ? EXPAND_INITIALIZER : EXPAND_SUM));
8143 if (! CONSTANT_P (op0))
8145 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8146 VOIDmode, modifier);
8147 /* Return a PLUS if modifier says it's OK. */
8148 if (modifier == EXPAND_SUM
8149 || modifier == EXPAND_INITIALIZER)
8150 return simplify_gen_binary (PLUS, mode, op0, op1);
8153 /* Use immed_double_const to ensure that the constant is
8154 truncated according to the mode of OP1, then sign extended
8155 to a HOST_WIDE_INT. Using the constant directly can result
8156 in non-canonical RTL in a 64x32 cross compile. */
8158 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
8160 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
8161 op0 = plus_constant (op0, INTVAL (constant_part));
8162 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8163 op0 = force_operand (op0, target);
8168 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8169 subtarget = 0;
8171 /* No sense saving up arithmetic to be done
8172 if it's all in the wrong mode to form part of an address.
8173 And force_operand won't know whether to sign-extend or
8174 zero-extend. */
8175 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8176 || mode != ptr_mode)
8178 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8179 subtarget, &op0, &op1, 0);
8180 if (op0 == const0_rtx)
8182 if (op1 == const0_rtx)
8187 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8188 subtarget, &op0, &op1, modifier);
8189 return simplify_gen_binary (PLUS, mode, op0, op1);
8192 /* For initializers, we are allowed to return a MINUS of two
8193 symbolic constants. Here we handle all cases when both operands
8195 /* Handle difference of two symbolic constants,
8196 for the sake of an initializer. */
8197 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8198 && really_constant_p (TREE_OPERAND (exp, 0))
8199 && really_constant_p (TREE_OPERAND (exp, 1)))
8201 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8202 NULL_RTX, &op0, &op1, modifier);
8204 /* If the last operand is a CONST_INT, use plus_constant of
8205 the negated constant. Else make the MINUS. */
8206 if (GET_CODE (op1) == CONST_INT)
8207 return plus_constant (op0, - INTVAL (op1));
8209 return gen_rtx_MINUS (mode, op0, op1);
8212 this_optab = ! unsignedp && flag_trapv
8213 && (GET_MODE_CLASS (mode) == MODE_INT)
8214 ? subv_optab : sub_optab;
8216 /* No sense saving up arithmetic to be done
8217 if it's all in the wrong mode to form part of an address.
8218 And force_operand won't know whether to sign-extend or
8219 zero-extend. */
8220 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8221 || mode != ptr_mode)
8224 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8225 subtarget, &op0, &op1, modifier);
8227 /* Convert A - const to A + (-const). */
8228 if (GET_CODE (op1) == CONST_INT)
8230 op1 = negate_rtx (mode, op1);
8231 return simplify_gen_binary (PLUS, mode, op0, op1);
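/* Editorial note: the canonicalization above means a source-level
   subtraction of a constant never survives as a MINUS in the RTL.
   Sketch:

       x - 4   ==>   (plus:SI (reg x) (const_int -4))

   so later passes (cse, combine, addressing-mode selection) can treat
   constant additions and subtractions uniformly.  */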
8237 /* If first operand is constant, swap them.
8238 Thus the following special case checks need only
8239 check the second operand. */
8240 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
8242 tree t1 = TREE_OPERAND (exp, 0);
8243 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
8244 TREE_OPERAND (exp, 1) = t1;
8247 /* Attempt to return something suitable for generating an
8248 indexed address, for machines that support that. */
8250 if (modifier == EXPAND_SUM && mode == ptr_mode
8251 && host_integerp (TREE_OPERAND (exp, 1), 0))
8253 tree exp1 = TREE_OPERAND (exp, 1);
8255 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8258 /* If we knew for certain that this is arithmetic for an array
8259 reference, and we knew the bounds of the array, then we could
8260 apply the distributive law across (PLUS X C) for constant C.
8261 Without such knowledge, we risk overflowing the computation
8262 when both X and C are large, but X+C isn't. */
8263 /* ??? Could perhaps special-case EXP being unsigned and C being
8264 positive. In that case we are certain that X+C is no smaller
8265 than X and so the transformed expression will overflow iff the
8266 original would have. */
8268 if (GET_CODE (op0) != REG)
8269 op0 = force_operand (op0, NULL_RTX);
8270 if (GET_CODE (op0) != REG)
8271 op0 = copy_to_mode_reg (mode, op0);
8273 return gen_rtx_MULT (mode, op0,
8274 gen_int_mode (tree_low_cst (exp1, 0),
8275 TYPE_MODE (TREE_TYPE (exp1))));
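/* Editorial illustration: for an EXPAND_SUM request arising from array
   indexing such as a[i] with 4-byte elements, the code above returns a
   bare (mult:SI (reg i) (const_int 4)) so the caller can fold it into
   an indexed address, e.g.

       (plus (reg base) (mult (reg i) (const_int 4)))

   on machines whose addressing modes support scaled indexing.  */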
8278 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8281 if (modifier == EXPAND_STACK_PARM)
8284 /* Check for multiplying things that have been extended
8285 from a narrower type. If this machine supports multiplying
8286 in that narrower type with a result in the desired type,
8287 do it that way, and avoid the explicit type-conversion. */
8288 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
8289 && TREE_CODE (type) == INTEGER_TYPE
8290 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8291 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8292 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8293 && int_fits_type_p (TREE_OPERAND (exp, 1),
8294 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8295 /* Don't use a widening multiply if a shift will do. */
8296 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8297 > HOST_BITS_PER_WIDE_INT)
8298 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
8300 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8301 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8303 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
8304 /* If both operands are extended, they must either both
8305 be zero-extended or both be sign-extended. */
8306 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8308 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
8310 enum machine_mode innermode
8311 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
8312 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8313 ? smul_widen_optab : umul_widen_optab);
8314 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8315 ? umul_widen_optab : smul_widen_optab);
8316 if (mode == GET_MODE_WIDER_MODE (innermode))
8318 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
8320 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8321 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8322 TREE_OPERAND (exp, 1),
8323 NULL_RTX, &op0, &op1, 0);
8325 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8326 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8327 NULL_RTX, &op0, &op1, 0);
8330 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
8331 && innermode == word_mode)
8334 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8335 NULL_RTX, VOIDmode, 0);
8336 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8337 op1 = convert_modes (innermode, mode,
8338 expand_expr (TREE_OPERAND (exp, 1),
8339 NULL_RTX, VOIDmode, 0),
8342 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8343 NULL_RTX, VOIDmode, 0);
8344 temp = expand_binop (mode, other_optab, op0, op1, target,
8345 unsignedp, OPTAB_LIB_WIDEN);
8346 htem = expand_mult_highpart_adjust (innermode,
8347 gen_highpart (innermode, temp),
8349 gen_highpart (innermode, temp),
8351 emit_move_insn (gen_highpart (innermode, temp), htem);
8356 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8357 subtarget, &op0, &op1, 0);
8358 return expand_mult (mode, op0, op1, target, unsignedp);
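/* Editorial illustration of the widening-multiply case above, as a
   hypothetical source fragment:

       short a, b;
       int c = (int) a * (int) b;

   On a target providing a HImode->SImode widening multiply (for
   instance a mulhisi3 pattern feeding smul_widen_optab), the NOP_EXPRs
   are stripped and one widening multiply replaces two extensions plus
   a full SImode multiply.  */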
8360 case TRUNC_DIV_EXPR:
8361 case FLOOR_DIV_EXPR:
8363 case ROUND_DIV_EXPR:
8364 case EXACT_DIV_EXPR:
8365 if (modifier == EXPAND_STACK_PARM)
8367 /* Possible optimization: compute the dividend with EXPAND_SUM;
8368 then, if the divisor is constant, we can optimize the case
8369 where some terms of the dividend have coefficients divisible by it. */
8370 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8371 subtarget, &op0, &op1, 0);
8372 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8375 /* Emit a/b as a*(1/b). Later we may manage to CSE the reciprocal, saving
8376 an expensive divide. If not, combine will rebuild the original
8377 computation. */
8378 if (flag_unsafe_math_optimizations && optimize && !optimize_size
8379 && TREE_CODE (type) == REAL_TYPE
8380 && !real_onep (TREE_OPERAND (exp, 0)))
8381 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
8382 build (RDIV_EXPR, type,
8383 build_real (type, dconst1),
8384 TREE_OPERAND (exp, 1))),
8385 target, tmode, modifier);
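/* Editorial note: the a/b ==> a*(1/b) rewrite above is gated on
   -funsafe-math-optimizations because the two forms can round
   differently.  The payoff is in code such as

       for (i = 0; i < n; i++)
         x[i] = y[i] / scale;

   where CSE may hoist the single 1.0/scale computation, leaving one
   divide and n multiplies instead of n divides.  */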
8386 this_optab = sdiv_optab;
8389 case TRUNC_MOD_EXPR:
8390 case FLOOR_MOD_EXPR:
8392 case ROUND_MOD_EXPR:
8393 if (modifier == EXPAND_STACK_PARM)
8395 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8396 subtarget, &op0, &op1, 0);
8397 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8399 case FIX_ROUND_EXPR:
8400 case FIX_FLOOR_EXPR:
8402 abort (); /* Not used for C. */
8404 case FIX_TRUNC_EXPR:
8405 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8406 if (target == 0 || modifier == EXPAND_STACK_PARM)
8407 target = gen_reg_rtx (mode);
8408 expand_fix (target, op0, unsignedp);
8412 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8413 if (target == 0 || modifier == EXPAND_STACK_PARM)
8414 target = gen_reg_rtx (mode);
8415 /* expand_float can't figure out what to do if FROM has VOIDmode.
8416 So give it the correct mode. With -O, cse will optimize this. */
8417 if (GET_MODE (op0) == VOIDmode)
8418 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8420 expand_float (target, op0,
8421 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8425 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8426 if (modifier == EXPAND_STACK_PARM)
8428 temp = expand_unop (mode,
8429 ! unsignedp && flag_trapv
8430 && (GET_MODE_CLASS (mode) == MODE_INT)
8431 ? negv_optab : neg_optab, op0, target, 0);
8437 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8438 if (modifier == EXPAND_STACK_PARM)
8441 /* ABS_EXPR is not valid for complex arguments. */
8442 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
8443 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
8446 /* Unsigned abs is simply the operand. Testing here means we don't
8447 risk generating incorrect code below. */
8448 if (TREE_UNSIGNED (type))
8451 return expand_abs (mode, op0, target, unsignedp,
8452 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8456 target = original_target;
8458 || modifier == EXPAND_STACK_PARM
8459 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
8460 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
8461 || GET_MODE (target) != mode
8462 || (GET_CODE (target) == REG
8463 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8464 target = gen_reg_rtx (mode);
8465 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8466 target, &op0, &op1, 0);
8468 /* First try to do it with a special MIN or MAX instruction.
8469 If that does not win, use a conditional jump to select the proper
8470 value. */
8471 this_optab = (TREE_UNSIGNED (type)
8472 ? (code == MIN_EXPR ? umin_optab : umax_optab)
8473 : (code == MIN_EXPR ? smin_optab : smax_optab));
8475 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8480 /* At this point, a MEM target is no longer useful; we will get better
8481 code without it. */
8483 if (GET_CODE (target) == MEM)
8484 target = gen_reg_rtx (mode);
8487 emit_move_insn (target, op0);
8489 op0 = gen_label_rtx ();
8491 /* If this mode is an integer too wide to compare properly,
8492 compare word by word. Rely on cse to optimize constant cases. */
8493 if (GET_MODE_CLASS (mode) == MODE_INT
8494 && ! can_compare_p (GE, mode, ccp_jump))
8496 if (code == MAX_EXPR)
8497 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8498 target, op1, NULL_RTX, op0);
8500 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8501 op1, target, NULL_RTX, op0);
8505 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
8506 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
8507 unsignedp, mode, NULL_RTX, NULL_RTX,
8510 emit_move_insn (target, op1);
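/* Editorial sketch of the fallback built above for
   target = MAX (op0, op1), in pseudo-code:

       target = op0;
       if (target >= op1) goto op0_label;
       target = op1;
     op0_label:

   cse is relied upon to merge the two references to the first operand.  */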
8515 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8516 if (modifier == EXPAND_STACK_PARM)
8518 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8523 /* ??? Can optimize bitwise operations with one arg constant.
8524 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8525 and (a bitwise1 b) bitwise2 b (etc)
8526 but that is probably not worthwhile. */
8528 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8529 boolean values when we want in all cases to compute both of them. In
8530 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8531 as actual zero-or-1 values and then bitwise anding. In cases where
8532 there cannot be any side effects, better code would be made by
8533 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8534 how to recognize those cases. */
8536 case TRUTH_AND_EXPR:
8538 this_optab = and_optab;
8543 this_optab = ior_optab;
8546 case TRUTH_XOR_EXPR:
8548 this_optab = xor_optab;
8555 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8557 if (modifier == EXPAND_STACK_PARM)
8559 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8560 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8563 /* Could determine the answer when only additive constants differ. Also,
8564 the addition of one can be handled by changing the condition. */
8571 case UNORDERED_EXPR:
8578 temp = do_store_flag (exp,
8579 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8580 tmode != VOIDmode ? tmode : mode, 0);
8584 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8585 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8587 && GET_CODE (original_target) == REG
8588 && (GET_MODE (original_target)
8589 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8591 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8594 /* If temp is constant, we can just compute the result. */
8595 if (GET_CODE (temp) == CONST_INT)
8597 if (INTVAL (temp) != 0)
8598 emit_move_insn (target, const1_rtx);
8600 emit_move_insn (target, const0_rtx);
8605 if (temp != original_target)
8607 enum machine_mode mode1 = GET_MODE (temp);
8608 if (mode1 == VOIDmode)
8609 mode1 = tmode != VOIDmode ? tmode : mode;
8611 temp = copy_to_mode_reg (mode1, temp);
8614 op1 = gen_label_rtx ();
8615 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8616 GET_MODE (temp), unsignedp, op1);
8617 emit_move_insn (temp, const1_rtx);
8622 /* If no set-flag instruction, must generate a conditional
8623 store into a temporary variable. Drop through
8624 and handle this like && and ||. */
8626 case TRUTH_ANDIF_EXPR:
8627 case TRUTH_ORIF_EXPR:
8630 || modifier == EXPAND_STACK_PARM
8631 || ! safe_from_p (target, exp, 1)
8632 /* Make sure we don't have a hard reg (such as function's return
8633 value) live across basic blocks, if not optimizing. */
8634 || (!optimize && GET_CODE (target) == REG
8635 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8636 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8639 emit_clr_insn (target);
8641 op1 = gen_label_rtx ();
8642 jumpifnot (exp, op1);
8645 emit_0_to_1_insn (target);
8648 return ignore ? const0_rtx : target;
8650 case TRUTH_NOT_EXPR:
8651 if (modifier == EXPAND_STACK_PARM)
8653 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8654 /* The parser is careful to generate TRUTH_NOT_EXPR
8655 only with operands that are always zero or one. */
8656 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8657 target, 1, OPTAB_LIB_WIDEN);
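/* Editorial note: because the operand of TRUTH_NOT_EXPR is known to be
   exactly 0 or 1, logical negation reduces to one exclusive-or:

       !x  ==  x ^ 1        (x in {0, 1})

   avoiding a compare-and-branch sequence entirely.  */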
8663 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8665 return expand_expr (TREE_OPERAND (exp, 1),
8666 (ignore ? const0_rtx : target),
8667 VOIDmode, modifier);
8670 /* If we would have a "singleton" (see below) were it not for a
8671 conversion in each arm, bring that conversion back out. */
8672 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8673 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8674 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8675 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8677 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8678 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8680 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8681 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8682 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8683 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8684 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8685 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8686 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8687 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8688 return expand_expr (build1 (NOP_EXPR, type,
8689 build (COND_EXPR, TREE_TYPE (iftrue),
8690 TREE_OPERAND (exp, 0),
8692 target, tmode, modifier);
8696 /* Note that COND_EXPRs whose type is a structure or union
8697 are required to be constructed to contain assignments of
8698 a temporary variable, so that we can evaluate them here
8699 for side effect only. If type is void, we must do likewise. */
8701 /* If an arm of the branch requires a cleanup,
8702 only that cleanup is performed. */
8705 tree binary_op = 0, unary_op = 0;
8707 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8708 convert it to our mode, if necessary. */
8709 if (integer_onep (TREE_OPERAND (exp, 1))
8710 && integer_zerop (TREE_OPERAND (exp, 2))
8711 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8715 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8720 if (modifier == EXPAND_STACK_PARM)
8722 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8723 if (GET_MODE (op0) == mode)
8727 target = gen_reg_rtx (mode);
8728 convert_move (target, op0, unsignedp);
8732 /* Check for X ? A + B : A. If we have this, we can copy A to the
8733 output and conditionally add B. Similarly for unary operations.
8734 Don't do this if X has side-effects because those side effects
8735 might affect A or B and the "?" operation is a sequence point in
8736 ANSI. (operand_equal_p tests for side effects.) */
8738 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8739 && operand_equal_p (TREE_OPERAND (exp, 2),
8740 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8741 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8742 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8743 && operand_equal_p (TREE_OPERAND (exp, 1),
8744 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8745 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8746 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8747 && operand_equal_p (TREE_OPERAND (exp, 2),
8748 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8749 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8750 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8751 && operand_equal_p (TREE_OPERAND (exp, 1),
8752 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8753 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8755 /* If we are not to produce a result, we have no target. Otherwise,
8756 if a target was specified use it; it will not be used as an
8757 intermediate target unless it is safe. If no target, use a
8758 temporary and compile the expression unchanged. */
8762 else if (modifier == EXPAND_STACK_PARM)
8763 temp = assign_temp (type, 0, 0, 1);
8764 else if (original_target
8765 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8766 || (singleton && GET_CODE (original_target) == REG
8767 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8768 && original_target == var_rtx (singleton)))
8769 && GET_MODE (original_target) == mode
8770 #ifdef HAVE_conditional_move
8771 && (! can_conditionally_move_p (mode)
8772 || GET_CODE (original_target) == REG
8773 || TREE_ADDRESSABLE (type))
8775 && (GET_CODE (original_target) != MEM
8776 || TREE_ADDRESSABLE (type)))
8777 temp = original_target;
8778 else if (TREE_ADDRESSABLE (type))
8781 temp = assign_temp (type, 0, 0, 1);
8783 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8784 do the test of X as a store-flag operation, do this as
8785 A + ((X != 0) << log C). Similarly for other simple binary
8786 operators. Only do for C == 1 if BRANCH_COST is low. */
8787 if (temp && singleton && binary_op
8788 && (TREE_CODE (binary_op) == PLUS_EXPR
8789 || TREE_CODE (binary_op) == MINUS_EXPR
8790 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8791 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8792 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8793 : integer_onep (TREE_OPERAND (binary_op, 1)))
8794 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8798 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8799 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8800 ? addv_optab : add_optab)
8801 : TREE_CODE (binary_op) == MINUS_EXPR
8802 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8803 ? subv_optab : sub_optab)
8804 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8807 /* If we had X ? A : A + 1, do this as A + (X == 0). */
8808 if (singleton == TREE_OPERAND (exp, 1))
8809 cond = invert_truthvalue (TREE_OPERAND (exp, 0));
8811 cond = TREE_OPERAND (exp, 0);
8813 result = do_store_flag (cond, (safe_from_p (temp, singleton, 1)
8815 mode, BRANCH_COST <= 1);
8817 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8818 result = expand_shift (LSHIFT_EXPR, mode, result,
8819 build_int_2 (tree_log2
8823 (safe_from_p (temp, singleton, 1)
8824 ? temp : NULL_RTX), 0);
8828 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8829 return expand_binop (mode, boptab, op1, result, temp,
8830 unsignedp, OPTAB_LIB_WIDEN);
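/* Editorial illustration: the store-flag path above straightens a
   branchy conditional such as

       r = x ? a + 4 : a;

   into roughly

       r = a + ((x != 0) << 2);

   provided the comparison can be materialized by a set-flag insn and
   the constant is a power of 2 (or 1 when BRANCH_COST is low).  */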
8834 do_pending_stack_adjust ();
8836 op0 = gen_label_rtx ();
8838 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8842 /* If the target conflicts with the other operand of the
8843 binary op, we can't use it. Also, we can't use the target
8844 if it is a hard register, because evaluating the condition
8845 might clobber it. */
8847 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8848 || (GET_CODE (temp) == REG
8849 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8850 temp = gen_reg_rtx (mode);
8851 store_expr (singleton, temp,
8852 modifier == EXPAND_STACK_PARM ? 2 : 0);
8855 expand_expr (singleton,
8856 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8857 if (singleton == TREE_OPERAND (exp, 1))
8858 jumpif (TREE_OPERAND (exp, 0), op0);
8860 jumpifnot (TREE_OPERAND (exp, 0), op0);
8862 start_cleanup_deferral ();
8863 if (binary_op && temp == 0)
8864 /* Just touch the other operand. */
8865 expand_expr (TREE_OPERAND (binary_op, 1),
8866 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8868 store_expr (build (TREE_CODE (binary_op), type,
8869 make_tree (type, temp),
8870 TREE_OPERAND (binary_op, 1)),
8871 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8873 store_expr (build1 (TREE_CODE (unary_op), type,
8874 make_tree (type, temp)),
8875 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8878 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8879 comparison operator. If we have one of these cases, set the
8880 output to A, branch on A (cse will merge these two references),
8881 then set the output to FOO. */
8883 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8884 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8885 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8886 TREE_OPERAND (exp, 1), 0)
8887 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8888 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8889 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8891 if (GET_CODE (temp) == REG
8892 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8893 temp = gen_reg_rtx (mode);
8894 store_expr (TREE_OPERAND (exp, 1), temp,
8895 modifier == EXPAND_STACK_PARM ? 2 : 0);
8896 jumpif (TREE_OPERAND (exp, 0), op0);
8898 start_cleanup_deferral ();
8899 store_expr (TREE_OPERAND (exp, 2), temp,
8900 modifier == EXPAND_STACK_PARM ? 2 : 0);
8904 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8905 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8906 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8907 TREE_OPERAND (exp, 2), 0)
8908 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8909 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8910 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8912 if (GET_CODE (temp) == REG
8913 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8914 temp = gen_reg_rtx (mode);
8915 store_expr (TREE_OPERAND (exp, 2), temp,
8916 modifier == EXPAND_STACK_PARM ? 2 : 0);
8917 jumpifnot (TREE_OPERAND (exp, 0), op0);
8919 start_cleanup_deferral ();
8920 store_expr (TREE_OPERAND (exp, 1), temp,
8921 modifier == EXPAND_STACK_PARM ? 2 : 0);
8926 op1 = gen_label_rtx ();
8927 jumpifnot (TREE_OPERAND (exp, 0), op0);
8929 start_cleanup_deferral ();
8931 /* One branch of the cond can be void, if it never returns. For
8932 example A ? throw : E */
8934 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8935 store_expr (TREE_OPERAND (exp, 1), temp,
8936 modifier == EXPAND_STACK_PARM ? 2 : 0);
8938 expand_expr (TREE_OPERAND (exp, 1),
8939 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8940 end_cleanup_deferral ();
8942 emit_jump_insn (gen_jump (op1));
8945 start_cleanup_deferral ();
8947 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8948 store_expr (TREE_OPERAND (exp, 2), temp,
8949 modifier == EXPAND_STACK_PARM ? 2 : 0);
8951 expand_expr (TREE_OPERAND (exp, 2),
8952 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8955 end_cleanup_deferral ();
8966 /* Something needs to be initialized, but we didn't know
8967 where that thing was when building the tree. For example,
8968 it could be the return value of a function, or a parameter
8969 to a function which is laid out on the stack, or a temporary
8970 variable which must be passed by reference.
8972 We guarantee that the expression will either be constructed
8973 or copied into our original target. */
8975 tree slot = TREE_OPERAND (exp, 0);
8976 tree cleanups = NULL_TREE;
8979 if (TREE_CODE (slot) != VAR_DECL)
8983 target = original_target;
8985 /* Set this here so that if we get a target that refers to a
8986 register variable that's already been used, put_reg_into_stack
8987 knows that it should fix up those uses. */
8988 TREE_USED (slot) = 1;
8992 if (DECL_RTL_SET_P (slot))
8994 target = DECL_RTL (slot);
8995 /* If we have already expanded the slot, don't do
8996 it again. */
8997 if (TREE_OPERAND (exp, 1) == NULL_TREE)
9002 target = assign_temp (type, 2, 0, 1);
9003 /* All temp slots at this level must not conflict. */
9004 preserve_temp_slots (target);
9005 SET_DECL_RTL (slot, target);
9006 if (TREE_ADDRESSABLE (slot))
9007 put_var_into_stack (slot, /*rescan=*/false);
9009 /* Since SLOT is not known to the called function
9010 to belong to its stack frame, we must build an explicit
9011 cleanup. This case occurs when we must build up a reference
9012 to pass the reference as an argument. In this case,
9013 it is very likely that such a reference need not be
9014 built here. */
9016 if (TREE_OPERAND (exp, 2) == 0)
9017 TREE_OPERAND (exp, 2)
9018 = (*lang_hooks.maybe_build_cleanup) (slot);
9019 cleanups = TREE_OPERAND (exp, 2);
9024 /* This case does occur when expanding a parameter which
9025 needs to be constructed on the stack. The target
9026 is the actual stack address that we want to initialize.
9027 The function we call will perform the cleanup in this case. */
9029 /* If we have already assigned it space, use that space,
9030 not target that we were passed in, as our target
9031 parameter is only a hint. */
9032 if (DECL_RTL_SET_P (slot))
9034 target = DECL_RTL (slot);
9035 /* If we have already expanded the slot, don't do
9036 it again. */
9037 if (TREE_OPERAND (exp, 1) == NULL_TREE)
9042 SET_DECL_RTL (slot, target);
9043 /* If we must have an addressable slot, then make sure that
9044 the RTL that we just stored in slot is OK. */
9045 if (TREE_ADDRESSABLE (slot))
9046 put_var_into_stack (slot, /*rescan=*/true);
9050 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
9051 /* Mark it as expanded. */
9052 TREE_OPERAND (exp, 1) = NULL_TREE;
9054 store_expr (exp1, target, modifier == EXPAND_STACK_PARM ? 2 : 0);
9056 expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));
9063 tree lhs = TREE_OPERAND (exp, 0);
9064 tree rhs = TREE_OPERAND (exp, 1);
9066 temp = expand_assignment (lhs, rhs, ! ignore);
9072 /* If lhs is complex, expand calls in rhs before computing it.
9073 That's so we don't compute a pointer and save it over a
9074 call. If lhs is simple, compute it first so we can give it
9075 as a target if the rhs is just a call. This avoids an
9076 extra temp and copy and that prevents a partial-subsumption
9077 which makes bad code. Actually we could treat
9078 component_ref's of vars like vars. */
9080 tree lhs = TREE_OPERAND (exp, 0);
9081 tree rhs = TREE_OPERAND (exp, 1);
9085 /* Check for |= or &= of a bitfield of size one into another bitfield
9086 of size 1. In this case, (unless we need the result of the
9087 assignment) we can do this more efficiently with a
9088 test followed by an assignment, if necessary.
9090 ??? At this point, we can't get a BIT_FIELD_REF here. But if
9091 things change so we do, this code should be enhanced to
9092 handle it. */
9094 && TREE_CODE (lhs) == COMPONENT_REF
9095 && (TREE_CODE (rhs) == BIT_IOR_EXPR
9096 || TREE_CODE (rhs) == BIT_AND_EXPR)
9097 && TREE_OPERAND (rhs, 0) == lhs
9098 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
9099 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
9100 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
9102 rtx label = gen_label_rtx ();
9104 do_jump (TREE_OPERAND (rhs, 1),
9105 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
9106 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
9107 expand_assignment (lhs, convert (TREE_TYPE (rhs),
9108 (TREE_CODE (rhs) == BIT_IOR_EXPR
9110 : integer_zero_node)),
9112 do_pending_stack_adjust ();
9117 temp = expand_assignment (lhs, rhs, ! ignore);
9123 if (!TREE_OPERAND (exp, 0))
9124 expand_null_return ();
9126 expand_return (TREE_OPERAND (exp, 0));
9129 case PREINCREMENT_EXPR:
9130 case PREDECREMENT_EXPR:
9131 return expand_increment (exp, 0, ignore);
9133 case POSTINCREMENT_EXPR:
9134 case POSTDECREMENT_EXPR:
9135 /* Faster to treat as pre-increment if result is not used. */
9136 return expand_increment (exp, ! ignore, ignore);
9139 if (modifier == EXPAND_STACK_PARM)
9141 /* Are we taking the address of a nested function? */
9142 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
9143 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
9144 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
9145 && ! TREE_STATIC (exp))
9147 op0 = trampoline_address (TREE_OPERAND (exp, 0));
9148 op0 = force_operand (op0, target);
9150 /* If we are taking the address of something erroneous, just
9151 return a zero. */
9152 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
9154 /* If we are taking the address of a constant and are at the
9155 top level, we have to use output_constant_def since we can't
9156 call force_const_mem at top level. */
9158 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
9159 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
9161 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
9164 /* We make sure to pass const0_rtx down if we came in with
9165 ignore set, to avoid doing the cleanups twice for something. */
9166 op0 = expand_expr (TREE_OPERAND (exp, 0),
9167 ignore ? const0_rtx : NULL_RTX, VOIDmode,
9168 (modifier == EXPAND_INITIALIZER
9169 ? modifier : EXPAND_CONST_ADDRESS));
9171 /* If we are going to ignore the result, OP0 will have been set
9172 to const0_rtx, so just return it. Don't get confused and
9173 think we are taking the address of the constant. */
9177 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
9178 clever and return a REG when given a MEM. */
9179 op0 = protect_from_queue (op0, 1);
9181 /* We would like the object in memory. If it is a constant, we can
9182 have it be statically allocated into memory. For a non-constant,
9183 we need to allocate some memory and store the value into it. */
9185 if (CONSTANT_P (op0))
9186 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
9188 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
9189 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
9190 || GET_CODE (op0) == PARALLEL || GET_CODE (op0) == LO_SUM)
9192 /* If the operand is a SAVE_EXPR, we can deal with this by
9193 forcing the SAVE_EXPR into memory. */
9194 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
9196 put_var_into_stack (TREE_OPERAND (exp, 0),
9198 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
9202 /* If this object is in a register, it can't be BLKmode. */
9203 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9204 rtx memloc = assign_temp (inner_type, 1, 1, 1);
9206 if (GET_CODE (op0) == PARALLEL)
9207 /* Handle calls that pass values in multiple
9208 non-contiguous locations. The Irix 6 ABI has examples
9209 of this. */
9210 emit_group_store (memloc, op0, inner_type,
9211 int_size_in_bytes (inner_type));
9213 emit_move_insn (memloc, op0);
9219 if (GET_CODE (op0) != MEM)
9222 mark_temp_addr_taken (op0);
9223 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
9225 op0 = XEXP (op0, 0);
9226 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
9227 op0 = convert_memory_address (ptr_mode, op0);
9231 /* If OP0 is not aligned as least as much as the type requires, we
9232 need to make a temporary, copy OP0 to it, and take the address of
9233 the temporary. We want to use the alignment of the type, not of
9234 the operand. Note that this is incorrect for FUNCTION_TYPE, but
9235 the test for BLKmode means that can't happen. The test for
9236 BLKmode is because we never make mis-aligned MEMs with
9237 non-BLKmode.
9239 We don't need to do this at all if the machine doesn't have
9240 strict alignment. */
9241 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
9242 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
9244 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
9246 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9249 if (TYPE_ALIGN_OK (inner_type))
9252 if (TREE_ADDRESSABLE (inner_type))
9254 /* We can't make a bitwise copy of this object, so fail. */
9255 error ("cannot take the address of an unaligned member");
9259 new = assign_stack_temp_for_type
9260 (TYPE_MODE (inner_type),
9261 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
9262 : int_size_in_bytes (inner_type),
9263 1, build_qualified_type (inner_type,
9264 (TYPE_QUALS (inner_type)
9265 | TYPE_QUAL_CONST)));
9267 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
9268 (modifier == EXPAND_STACK_PARM
9269 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
9274 op0 = force_operand (XEXP (op0, 0), target);
9278 && GET_CODE (op0) != REG
9279 && modifier != EXPAND_CONST_ADDRESS
9280 && modifier != EXPAND_INITIALIZER
9281 && modifier != EXPAND_SUM)
9282 op0 = force_reg (Pmode, op0);
9284 if (GET_CODE (op0) == REG
9285 && ! REG_USERVAR_P (op0))
9286 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
9288 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
9289 op0 = convert_memory_address (ptr_mode, op0);
9293 case ENTRY_VALUE_EXPR:
9296 /* COMPLEX type for Extended Pascal & Fortran */
9299 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9302 /* Get the rtx code of the operands. */
9303 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9304 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
9307 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
9311 /* Move the real (op0) and imaginary (op1) parts to their locations. */
9312 emit_move_insn (gen_realpart (mode, target), op0);
9313 emit_move_insn (gen_imagpart (mode, target), op1);
9315 insns = get_insns ();
9318 /* Complex construction should appear as a single unit. */
9319 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
9320 each with a separate pseudo as destination.
9321 It's not correct for flow to treat them as a unit. */
9322 if (GET_CODE (target) != CONCAT)
9323 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
9331 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9332 return gen_realpart (mode, op0);
9335 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9336 return gen_imagpart (mode, op0);
9340 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9344 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9347 target = gen_reg_rtx (mode);
9351 /* Store the realpart and the negated imagpart to target. */
9352 emit_move_insn (gen_realpart (partmode, target),
9353 gen_realpart (partmode, op0));
9355 imag_t = gen_imagpart (partmode, target);
9356 temp = expand_unop (partmode,
9357 ! unsignedp && flag_trapv
9358 && (GET_MODE_CLASS (partmode) == MODE_INT)
9359 ? negv_optab : neg_optab,
9360 gen_imagpart (partmode, op0), imag_t, 0);
9362 emit_move_insn (imag_t, temp);
9364 insns = get_insns ();
9367 /* Conjugate should appear as a single unit.
9368 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
9369 each with a separate pseudo as destination.
9370 It's not correct for flow to treat them as a unit. */
9371 if (GET_CODE (target) != CONCAT)
9372 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
9379 case TRY_CATCH_EXPR:
9381 tree handler = TREE_OPERAND (exp, 1);
9383 expand_eh_region_start ();
9385 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9387 expand_eh_region_end_cleanup (handler);
9392 case TRY_FINALLY_EXPR:
9394 tree try_block = TREE_OPERAND (exp, 0);
9395 tree finally_block = TREE_OPERAND (exp, 1);
9397 if (!optimize || unsafe_for_reeval (finally_block) > 1)
9399 /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
9400 is not sufficient, so we cannot expand the block twice.
9401 So we play games with GOTO_SUBROUTINE_EXPR to let us
9402 expand the thing only once. */
9403 /* When not optimizing, we go ahead with this form since
9404 (1) user breakpoints operate more predictably without
9405 code duplication, and
9406 (2) we're not running any of the global optimizers
9407 that would explode in time/space with the highly
9408 connected CFG created by the indirect branching. */
9410 rtx finally_label = gen_label_rtx ();
9411 rtx done_label = gen_label_rtx ();
9412 rtx return_link = gen_reg_rtx (Pmode);
9413 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
9414 (tree) finally_label, (tree) return_link);
9415 TREE_SIDE_EFFECTS (cleanup) = 1;
9417 /* Start a new binding layer that will keep track of all cleanup
9418 actions to be performed. */
9419 expand_start_bindings (2);
9420 target_temp_slot_level = temp_slot_level;
9422 expand_decl_cleanup (NULL_TREE, cleanup);
9423 op0 = expand_expr (try_block, target, tmode, modifier);
9425 preserve_temp_slots (op0);
9426 expand_end_bindings (NULL_TREE, 0, 0);
9427 emit_jump (done_label);
9428 emit_label (finally_label);
9429 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
9430 emit_indirect_jump (return_link);
9431 emit_label (done_label);
9435 expand_start_bindings (2);
9436 target_temp_slot_level = temp_slot_level;
9438 expand_decl_cleanup (NULL_TREE, finally_block);
9439 op0 = expand_expr (try_block, target, tmode, modifier);
9441 preserve_temp_slots (op0);
9442 expand_end_bindings (NULL_TREE, 0, 0);
9448 case GOTO_SUBROUTINE_EXPR:
9450 rtx subr = (rtx) TREE_OPERAND (exp, 0);
9451 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
9452 rtx return_address = gen_label_rtx ();
9453 emit_move_insn (return_link,
9454 gen_rtx_LABEL_REF (Pmode, return_address));
9456 emit_label (return_address);
9461 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
9464 return get_exception_pointer (cfun);
9467 /* Function descriptors are not valid except for as
9468 initialization constants, and should not be expanded. */
9472 return (*lang_hooks.expand_expr) (exp, original_target, tmode, modifier);
9475 /* Here to do an ordinary binary operator, generating an instruction
9476 from the optab already placed in `this_optab'. */
9478 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9479 subtarget, &op0, &op1, 0);
9481 if (modifier == EXPAND_STACK_PARM)
9483 temp = expand_binop (mode, this_optab, op0, op1, target,
9484 unsignedp, OPTAB_LIB_WIDEN);
9490 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9491 when applied to the address of EXP produces an address known to be
9492 aligned more than BIGGEST_ALIGNMENT. */
9495 is_aligning_offset (tree offset, tree exp)
9497 /* Strip off any conversions and WITH_RECORD_EXPR nodes. */
9498 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9499 || TREE_CODE (offset) == NOP_EXPR
9500 || TREE_CODE (offset) == CONVERT_EXPR
9501 || TREE_CODE (offset) == WITH_RECORD_EXPR)
9502 offset = TREE_OPERAND (offset, 0);
9504 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9505 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9506 if (TREE_CODE (offset) != BIT_AND_EXPR
9507 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9508 || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT) <= 0
9509 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9512 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9513 It must be NEGATE_EXPR. Then strip any more conversions. */
9514 offset = TREE_OPERAND (offset, 0);
9515 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9516 || TREE_CODE (offset) == NOP_EXPR
9517 || TREE_CODE (offset) == CONVERT_EXPR)
9518 offset = TREE_OPERAND (offset, 0);
9520 if (TREE_CODE (offset) != NEGATE_EXPR)
9523 offset = TREE_OPERAND (offset, 0);
9524 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9525 || TREE_CODE (offset) == NOP_EXPR
9526 || TREE_CODE (offset) == CONVERT_EXPR)
9527 offset = TREE_OPERAND (offset, 0);
9529 /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
9530 whose type is the same as that of EXP. */
9531 return (TREE_CODE (offset) == ADDR_EXPR
9532 && (TREE_OPERAND (offset, 0) == exp
9533 || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
9534 && (TREE_TYPE (TREE_OPERAND (offset, 0))
9535 == TREE_TYPE (exp)))));
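/* Editorial note on is_aligning_offset: the shape it accepts corresponds
   to rounding an object's own address up to an alignment boundary, as in
   the hypothetical

       offset = (-(long) &exp) & (align - 1);

   that is, a BIT_AND_EXPR of a NEGATE_EXPR of the ADDR_EXPR with a mask
   of the form 2**k - 1 exceeding BIGGEST_ALIGNMENT.  */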
9538 /* Return the tree node if ARG corresponds to a string constant, or zero
9539 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9540 in bytes within the string that ARG is accessing. The type of the
9541 offset will be `sizetype'. */
9544 string_constant (tree arg, tree *ptr_offset)
9548 if (TREE_CODE (arg) == ADDR_EXPR
9549 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9551 *ptr_offset = size_zero_node;
9552 return TREE_OPERAND (arg, 0);
9554 else if (TREE_CODE (arg) == PLUS_EXPR)
9556 tree arg0 = TREE_OPERAND (arg, 0);
9557 tree arg1 = TREE_OPERAND (arg, 1);
9562 if (TREE_CODE (arg0) == ADDR_EXPR
9563 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9565 *ptr_offset = convert (sizetype, arg1);
9566 return TREE_OPERAND (arg0, 0);
9568 else if (TREE_CODE (arg1) == ADDR_EXPR
9569 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9571 *ptr_offset = convert (sizetype, arg0);
9572 return TREE_OPERAND (arg1, 0);
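/* Editorial illustration: given a hypothetical call strlen (s + 3)
   where s is the literal "hello world", string_constant receives the
   PLUS_EXPR of &"hello world" and 3, returns the STRING_CST, and sets
   *PTR_OFFSET to 3, which lets builtin folders compute the length at
   compile time.  */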
9579 /* Expand code for a post- or pre- increment or decrement
9580 and return the RTX for the result.
9581 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9584 expand_increment (tree exp, int post, int ignore)
9588 tree incremented = TREE_OPERAND (exp, 0);
9589 optab this_optab = add_optab;
9591 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9592 int op0_is_copy = 0;
9593 int single_insn = 0;
9594 /* 1 means we can't store into OP0 directly,
9595 because it is a subreg narrower than a word,
9596 and we don't dare clobber the rest of the word. */
9599 /* Stabilize any component ref that might need to be
9600 evaluated more than once below. */
9602 || TREE_CODE (incremented) == BIT_FIELD_REF
9603 || (TREE_CODE (incremented) == COMPONENT_REF
9604 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9605 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9606 incremented = stabilize_reference (incremented);
9607 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9608 ones into save exprs so that they don't accidentally get evaluated
9609 more than once by the code below. */
9610 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9611 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9612 incremented = save_expr (incremented);
9614 /* Compute the operands as RTX.
9615 Note whether OP0 is the actual lvalue or a copy of it:
9616 I believe it is a copy iff it is a register or subreg
9617 and insns were generated in computing it. */
9619 temp = get_last_insn ();
9620 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9622 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9623 in place but instead must do sign- or zero-extension during assignment,
9624 so we copy it into a new register and let the code below use it as
9625 a copy.
9627 Note that we can safely modify this SUBREG since it is known not to be
9628 shared (it was made by the expand_expr call above). */
9630 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9633 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9637 else if (GET_CODE (op0) == SUBREG
9638 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9640 /* We cannot increment this SUBREG in place. If we are
9641 post-incrementing, get a copy of the old value. Otherwise,
9642 just mark that we cannot increment in place. */
9644 op0 = copy_to_reg (op0);
9649 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9650 && temp != get_last_insn ());
9651 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9653 /* Decide whether incrementing or decrementing. */
9654 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9655 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9656 this_optab = sub_optab;
9658 /* Convert decrement by a constant into a negative increment. */
9659 if (this_optab == sub_optab
9660 && GET_CODE (op1) == CONST_INT)
9662 op1 = GEN_INT (-INTVAL (op1));
9663 this_optab = add_optab;
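/* Editorial note: after the conversion above, a source-level x -= 4 is
   handled exactly like x += -4, so only the addition path below needs a
   queued-increment pattern.  */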
9666 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9667 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9669 /* For a preincrement, see if we can do this with a single instruction. */
9672 icode = (int) this_optab->handlers[(int) mode].insn_code;
9673 if (icode != (int) CODE_FOR_nothing
9674 /* Make sure that OP0 is valid for operands 0 and 1
9675 of the insn we want to queue. */
9676 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9677 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9678 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9682 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9683 then we cannot just increment OP0. We must therefore contrive to
9684 increment the original value. Then, for postincrement, we can return
9685 OP0 since it is a copy of the old value. For preincrement, expand here
9686 unless we can do it with a single insn.
9688 Likewise if storing directly into OP0 would clobber high bits
9689 we need to preserve (bad_subreg). */
9690 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9692 /* This is the easiest way to increment the value wherever it is.
9693 Problems with multiple evaluation of INCREMENTED are prevented
9694 because either (1) it is a component_ref or preincrement,
9695 in which case it was stabilized above, or (2) it is an array_ref
9696 with constant index in an array in a register, which is
9697 safe to reevaluate. */
9698 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9699 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9700 ? MINUS_EXPR : PLUS_EXPR),
9703 TREE_OPERAND (exp, 1));
9705 while (TREE_CODE (incremented) == NOP_EXPR
9706 || TREE_CODE (incremented) == CONVERT_EXPR)
9708 newexp = convert (TREE_TYPE (incremented), newexp);
9709 incremented = TREE_OPERAND (incremented, 0);
9712 temp = expand_assignment (incremented, newexp, ! post && ! ignore);
9713 return post ? op0 : temp;
9718 /* We have a true reference to the value in OP0.
9719 If there is an insn to add or subtract in this mode, queue it.
9720 Queueing the increment insn avoids the register shuffling
9721 that often results if we must increment now and first save
9722 the old value for subsequent use. */
9724 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9725 op0 = stabilize (op0);
9728 icode = (int) this_optab->handlers[(int) mode].insn_code;
9729 if (icode != (int) CODE_FOR_nothing
9730 /* Make sure that OP0 is valid for operands 0 and 1
9731 of the insn we want to queue. */
9732 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9733 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9735 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9736 op1 = force_reg (mode, op1);
9738 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9740 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9742 rtx addr = (general_operand (XEXP (op0, 0), mode)
9743 ? force_reg (Pmode, XEXP (op0, 0))
9744 : copy_to_reg (XEXP (op0, 0)));
9747 op0 = replace_equiv_address (op0, addr);
9748 temp = force_reg (GET_MODE (op0), op0);
9749 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9750 op1 = force_reg (mode, op1);
9752 /* The increment queue is LIFO, thus we have to `queue'
9753 the instructions in reverse order. */
9754 enqueue_insn (op0, gen_move_insn (op0, temp));
9755 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9760 /* Preincrement, or we can't increment with one simple insn. */
9762 /* Save a copy of the value before inc or dec, to return it later. */
9763 temp = value = copy_to_reg (op0);
9765 /* Arrange to return the incremented value. */
9766 /* Copy the rtx because expand_binop will protect from the queue,
9767 and the results of that would be invalid for us to return
9768 if our caller does emit_queue before using our result. */
9769 temp = copy_rtx (value = op0);
9771 /* Increment however we can. */
9772 op1 = expand_binop (mode, this_optab, value, op1, op0,
9773 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9775 /* Make sure the value is stored into OP0. */
9777 emit_move_insn (op0, op1);
9782 /* Generate code to calculate EXP using a store-flag instruction
9783 and return an rtx for the result. EXP is either a comparison
9784 or a TRUTH_NOT_EXPR whose operand is a comparison.
9786 If TARGET is nonzero, store the result there if convenient.
9788 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
9789 cheap.
9791 Return zero if there is no suitable set-flag instruction
9792 available on this machine.
9794 Once expand_expr has been called on the arguments of the comparison,
9795 we are committed to doing the store flag, since it is not safe to
9796 re-evaluate the expression. We emit the store-flag insn by calling
9797 emit_store_flag, but only expand the arguments if we have a reason
9798 to believe that emit_store_flag will be successful. If we think that
9799 it will, but it isn't, we have to simulate the store-flag with a
9800 set/jump/set sequence. */
9803 do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
9806 tree arg0, arg1, type;
9808 enum machine_mode operand_mode;
9812 enum insn_code icode;
9813 rtx subtarget = target;
9816 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9817 result at the end. We can't simply invert the test since it would
9818 have already been inverted if it were valid. This case occurs for
9819 some floating-point comparisons. */
9821 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9822 invert = 1, exp = TREE_OPERAND (exp, 0);
9824 arg0 = TREE_OPERAND (exp, 0);
9825 arg1 = TREE_OPERAND (exp, 1);
9827 /* Don't crash if the comparison was erroneous. */
9828 if (arg0 == error_mark_node || arg1 == error_mark_node)
9831 type = TREE_TYPE (arg0);
9832 operand_mode = TYPE_MODE (type);
9833 unsignedp = TREE_UNSIGNED (type);
9835 /* We won't bother with BLKmode store-flag operations because it would mean
9836 passing a lot of information to emit_store_flag. */
9837 if (operand_mode == BLKmode)
9840 /* We won't bother with store-flag operations involving function pointers
9841 when function pointers must be canonicalized before comparisons. */
9842 #ifdef HAVE_canonicalize_funcptr_for_compare
9843 if (HAVE_canonicalize_funcptr_for_compare
9844 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9845 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9847 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9848 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9849 == FUNCTION_TYPE))))
9856 /* Get the rtx comparison code to use. We know that EXP is a comparison
9857 operation of some type. Some comparisons against 1 and -1 can be
9858 converted to comparisons with zero. Do so here so that the tests
9859 below will be aware that we have a comparison with zero. These
9860 tests will not catch constants in the first operand, but constants
9861 are rarely passed as the first operand. */
9863 switch (TREE_CODE (exp))
9872 if (integer_onep (arg1))
9873 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9875 code = unsignedp ? LTU : LT;
9878 if (! unsignedp && integer_all_onesp (arg1))
9879 arg1 = integer_zero_node, code = LT;
9881 code = unsignedp ? LEU : LE;
9884 if (! unsignedp && integer_all_onesp (arg1))
9885 arg1 = integer_zero_node, code = GE;
9887 code = unsignedp ? GTU : GT;
9890 if (integer_onep (arg1))
9891 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9893 code = unsignedp ? GEU : GE;
9896 case UNORDERED_EXPR:
9922 /* Put a constant second. */
9923 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9925 tem = arg0; arg0 = arg1; arg1 = tem;
9926 code = swap_condition (code);
9929 /* If this is an equality or inequality test of a single bit, we can
9930 do this by shifting the bit being tested to the low-order bit and
9931 masking the result with the constant 1. If the condition was EQ,
9932 we xor it with 1. This does not require an scc insn and is faster
9933 than an scc insn even if we have it.
9935 The code to make this transformation was moved into fold_single_bit_test,
9936 so we just call into the folder and expand its result. */
9938 if ((code == NE || code == EQ)
9939 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9940 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9942 tree type = (*lang_hooks.types.type_for_mode) (mode, unsignedp);
9943 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
9945 target, VOIDmode, EXPAND_NORMAL);
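/* Editorial illustration of the single-bit rewrite delegated to
   fold_single_bit_test above:

       (x & 8) != 0   ==>   (x >> 3) & 1
       (x & 8) == 0   ==>   ((x >> 3) & 1) ^ 1

   one shift and one mask, with no scc insn or branch required.  */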
9948 /* Now see if we are likely to be able to do this. Return if not. */
9949 if (! can_compare_p (code, operand_mode, ccp_store_flag))
9952 icode = setcc_gen_code[(int) code];
9953 if (icode == CODE_FOR_nothing
9954 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
9956 /* We can only do this if it is one of the special cases that
9957 can be handled without an scc insn. */
9958 if ((code == LT && integer_zerop (arg1))
9959 || (! only_cheap && code == GE && integer_zerop (arg1)))
9961 else if (BRANCH_COST >= 0
9962 && ! only_cheap && (code == NE || code == EQ)
9963 && TREE_CODE (type) != REAL_TYPE
9964 && ((abs_optab->handlers[(int) operand_mode].insn_code
9965 != CODE_FOR_nothing)
9966 || (ffs_optab->handlers[(int) operand_mode].insn_code
9967 != CODE_FOR_nothing)))
9973 if (! get_subtarget (target)
9974 || GET_MODE (subtarget) != operand_mode
9975 || ! safe_from_p (subtarget, arg1, 1))
9978 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
9981 target = gen_reg_rtx (mode);
9983 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
9984 because, if emit_store_flag does anything it will succeed and
9985 OP0 and OP1 will not be used subsequently. */
9987 result = emit_store_flag (target, code,
9988 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
9989 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
9990 operand_mode, unsignedp, 1);
9995 result = expand_binop (mode, xor_optab, result, const1_rtx,
9996 result, 0, OPTAB_LIB_WIDEN);
10000 /* If this failed, we have to do this with set/compare/jump/set code. */
10001 if (GET_CODE (target) != REG
10002 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10003 target = gen_reg_rtx (GET_MODE (target));
10005 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10006 result = compare_from_rtx (op0, op1, code, unsignedp,
10007 operand_mode, NULL_RTX);
10008 if (GET_CODE (result) == CONST_INT)
10009 return (((result == const0_rtx && ! invert)
10010 || (result != const0_rtx && invert))
10011 ? const0_rtx : const1_rtx);
10013 /* The code of RESULT may not match CODE if compare_from_rtx
10014 decided to swap its operands and reverse the original code.
10016 We know that compare_from_rtx returns either a CONST_INT or
10017 a new comparison code, so it is safe to just extract the
10018 code from RESULT. */
10019 code = GET_CODE (result);
10021 label = gen_label_rtx ();
10022 if (bcc_gen_fctn[(int) code] == 0)
10025 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10026 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10027 emit_label (label);
10033 /* Stubs in case we haven't got a casesi insn. */
10034 #ifndef HAVE_casesi
10035 # define HAVE_casesi 0
10036 # define gen_casesi(a, b, c, d, e) (0)
10037 # define CODE_FOR_casesi CODE_FOR_nothing
/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */
#ifndef CASE_VALUES_THRESHOLD
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#endif /* CASE_VALUES_THRESHOLD */
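
/* With the defaults above, a switch needs at least 4 case values (when
   casesi is available) or 5 (when it is not) before a dispatch table
   is considered; targets may override the macro to shift that balance.  */
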
unsigned int
case_values_threshold (void)
{
  return CASE_VALUES_THRESHOLD;
}

/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */

int
try_casesi (tree index_type, tree index_expr, tree minval, tree range,
            rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);

      /* We must handle the endpoints in the original mode.  */
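      /* E.g. for a DImode index, the comparison below is done in
         DImode, and only values no greater than RANGE (expected to
         fit in SImode at this point) reach the truncation, so no
         significant bits are lost.  */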
      index_expr = build (MINUS_EXPR, index_type,
                          index_expr, minval);
      minval = integer_zero_node;
      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
                               omode, 1, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
        {
          index_expr = convert ((*lang_hooks.types.type_for_size)
                                (index_bits, 0), index_expr);
          index_type = TREE_TYPE (index_expr);
        }

      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
    }
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();
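
  /* Each operand of the casesi pattern must be in the mode its
     insn_data entry specifies and satisfy that entry's predicate;
     operands that do not are copied into a fresh register of the
     right mode below.  */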
  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);

  op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
                       op1, TREE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
                       op2, TREE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
                              table_label, default_label));
  return 1;
}

/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif

/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

static void
do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
              rtx default_label)
{
  rtx temp, vector;

  if (INTVAL (range) > cfun->max_jumptable_ents)
    cfun->max_jumptable_ents = INTVAL (range);

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */
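  /* For instance, for a switch over 10 ... 13, INDEX is the original
     value minus 10 and RANGE is 3; the single unsigned test
     "INDEX > 3" rejects both original values below 10 (which wrapped
     around to huge unsigned numbers) and values above 13.  */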

  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
                           default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
                        gen_rtx_MULT (Pmode, index,
                                      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
                        gen_rtx_LABEL_REF (Pmode, table_label));
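  /* The address built above is simply TABLE_LABEL + INDEX * entry size;
     e.g. with 4-byte table entries, entry I lives at TABLE_LABEL + I*4.  */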
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  MEM_NOTRAP_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}

int
try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
               rtx table_label, rtx default_label)
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold (build (MINUS_EXPR, index_type,
                            convert (index_type, index_expr),
                            convert (index_type, minval)));
  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
                convert_modes (TYPE_MODE (index_type),
                               TYPE_MODE (TREE_TYPE (range)),
                               expand_expr (range, NULL_RTX,
                                            VOIDmode, 0),
                               TREE_UNSIGNED (TREE_TYPE (range))),
                table_label, default_label);
  return 1;
}
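
/* A minimal standalone sketch (not GCC code) of the dispatch pattern
   try_tablejump arranges for, written with GNU C computed gotos; the
   function and label names are hypothetical, and the block is kept
   under #if 0 so it is never compiled.  */
#if 0
static int
example_dispatch (int x)
{
  static const void *table[] = { &&L10, &&L11, &&L12, &&L13 };

  /* Subtract the low bound, then do one unsigned bounds check.  */
  if ((unsigned int) (x - 10) > 3)
    goto Ldefault;
  goto *table[x - 10];

 L10: return 100;
 L11: return 101;
 L12: return 102;
 L13: return 103;
 Ldefault: return -1;
}
#endif
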
/* Nonzero if the mode is a valid vector mode for this architecture.
   This returns nonzero even if there is no hardware support for the
   vector mode, but we can emulate with narrower modes.  */

int
vector_mode_valid_p (enum machine_mode mode)
{
  enum mode_class class = GET_MODE_CLASS (mode);
  enum machine_mode innermode;

  /* Doh!  What's going on?  */
  if (class != MODE_VECTOR_INT
      && class != MODE_VECTOR_FLOAT)
    return 0;

  /* Hardware support.  Woo hoo!  */
  if (VECTOR_MODE_SUPPORTED_P (mode))
    return 1;

  innermode = GET_MODE_INNER (mode);

  /* We should probably return 1 if requesting V4DI and we have no DI,
     but we have V2DI, but this is probably very unlikely.  */

  /* If we have support for the inner mode, we can safely emulate it.
     We may not have V2DI, but we can emulate it with a pair of DIs.  */
  return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
}

/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */
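/* For instance, a V4SI VECTOR_CST whose element list supplies only
   1, 2 and 3 becomes (const_vector:V4SI [1 2 3 0]): elements missing
   from the list are filled with zero by the second loop below.  */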
static rtx
const_vector_from_tree (tree exp)
{
  rtvec v;
  int units, i;
  tree link, elt;
  enum machine_mode inner, mode;

  mode = TYPE_MODE (TREE_TYPE (exp));

  if (is_zeros_p (exp))
    return CONST0_RTX (mode);

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  v = rtvec_alloc (units);

  link = TREE_VECTOR_CST_ELTS (exp);
  for (i = 0; link; link = TREE_CHAIN (link), ++i)
    {
      elt = TREE_VALUE (link);

      if (TREE_CODE (elt) == REAL_CST)
        RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
                                                         inner);
      else
        RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
                                               TREE_INT_CST_HIGH (elt),
                                               inner);
    }

  /* Initialize remaining elements to 0.  */
  for (; i < units; ++i)
    RTVEC_ELT (v, i) = CONST0_RTX (inner);

  return gen_rtx_raw_CONST_VECTOR (mode, v);
}

#include "gt-expr.h"