/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "typeclass.h"
#include "langhooks.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
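/* For example, on a typical target where the stack grows downward and
   ARGS_GROW_DOWNWARD is left undefined, the condition above is true,
   so PUSH_ARGS_REVERSED gets defined and a call foo (a, b, c) pushes
   C first, then B, then A.  */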
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
/* Convert defined/undefined to boolean.  */
#ifdef TARGET_MEM_FUNCTIONS
#undef TARGET_MEM_FUNCTIONS
#define TARGET_MEM_FUNCTIONS 1
#else
#define TARGET_MEM_FUNCTIONS 0
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */

/* Chain of pending expressions for PLACEHOLDER_EXPR to replace.  */
tree placeholder_list = 0;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */
struct store_by_pieces
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
static rtx enqueue_insn (rtx, rtx);
static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
                                                     unsigned int);
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
                              struct move_by_pieces *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movstr (rtx, rtx, rtx, unsigned);
static rtx emit_block_move_via_libcall (rtx, rtx, rtx);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
                               struct store_by_pieces *);
static bool clear_storage_via_clrstr (rtx, rtx, unsigned);
static rtx clear_storage_via_libcall (rtx, rtx);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static int is_zeros_p (tree);
static int mostly_zeros_p (tree);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
                                     HOST_WIDE_INT, enum machine_mode,
                                     tree, tree, int, int);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
                        tree, enum machine_mode, int, tree, int);
static rtx var_rtx (tree);
static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
static unsigned HOST_WIDE_INT highest_pow2_factor_for_type (tree, tree);
static int is_aligning_offset (tree, tree);
static rtx expand_increment (tree, int, int);
static rtx do_store_flag (tree, rtx, enum machine_mode, int);
static void emit_single_push_insn (enum machine_mode, rtx, tree);
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
static rtx const_vector_from_tree (tree);
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
/* If a memory-to-memory move would take MOVE_RATIO or more simple
   move-instruction sequences, we will do a movstr or libcall instead.  */

#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)
/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
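/* Worked example, with illustrative numbers only: on a 32-bit target
   with MOVE_MAX == 4 and the default MOVE_RATIO of 15, a word-aligned
   32-byte copy costs move_by_pieces_ninsns (32, 32) == 8 SImode moves;
   8 < 15, so MOVE_BY_PIECES_P says to expand the copy inline.  */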
/* If a clear memory operation would take CLEAR_RATIO or more simple
   move-instruction sequences, we will do a clrstr or libcall instead.  */

#if defined (HAVE_clrstrqi) || defined (HAVE_clrstrhi) || defined (HAVE_clrstrsi) || defined (HAVE_clrstrdi) || defined (HAVE_clrstrti)
#define CLEAR_RATIO 2
#else
/* If we are optimizing for space, cut down the default clear ratio.  */
#define CLEAR_RATIO (optimize_size ? 3 : 15)
/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero, or
   to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)

/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

init_expr_once (void)
  enum machine_mode mode;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
         directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
        for (regno = 0; regno < FIRST_PSEUDO_REGISTER
             && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
            if (! HARD_REGNO_MODE_OK (regno, mode))

            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = mem1;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_DEST (pat) = mem;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;

            SET_DEST (pat) = mem1;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
           srcmode = GET_MODE_WIDER_MODE (srcmode))
          ic = can_extend_p (mode, srcmode, 0);
          if (ic == CODE_FOR_nothing)

          PUT_MODE (mem, srcmode);

          if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
            float_extend_from_mem[mode][srcmode] = true;
/* This is run at the start of compiling a function.  */

  cfun->expr = (struct expr_status *) ggc_alloc (sizeof (struct expr_status));

  pending_stack_adjust = 0;
  stack_pointer_delta = 0;
  inhibit_defer_pop = 0;
  apply_args_value = 0;

/* Small sanity check that the queue is empty at the end of a function.  */

finish_expr_for_function (void)

/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */
enqueue_insn (rtx var, rtx body)
  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
                                  body, pending_chain);
  return pending_chain;

/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */
protect_from_queue (rtx x, int modify)
  RTX_CODE code = GET_CODE (x);

#if 0 /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)

  /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
     use of autoincrement.  Make a copy of the contents of the memory
     location rather than a copy of the address, but not if the value is
     of mode BLKmode.  Don't modify X in place since it might be
     shared.  */
  if (code == MEM && GET_MODE (x) != BLKmode
      && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
      rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));

          rtx temp = gen_reg_rtx (GET_MODE (x));

          emit_insn_before (gen_move_insn (temp, new),

      /* Copy the address into a pseudo, so that the returned value
         remains correct across calls to emit_queue.  */
      return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));

  /* Otherwise, recursively protect the subexpressions of all
     the kinds of rtx's that can contain a QUEUED.  */
      rtx tem = protect_from_queue (XEXP (x, 0), 0);
      if (tem != XEXP (x, 0))
  else if (code == PLUS || code == MULT)
      rtx new0 = protect_from_queue (XEXP (x, 0), 0);
      rtx new1 = protect_from_queue (XEXP (x, 1), 0);
      if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))

  /* If the increment has not happened, use the variable itself.  Copy it
     into a new pseudo so that the value remains correct across calls to
     emit_queue.  */
  if (QUEUED_INSN (x) == 0)
    return copy_to_reg (QUEUED_VAR (x));
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
  return QUEUED_COPY (x);
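#if 0
/* A hypothetical usage sketch, not code from this file: an expander
   whose operands may contain QUEUED rtxs protects them immediately
   before use and does not hold the results across an emit_queue ().  */
static void
example_emit_copy (rtx target, rtx op)
{
  op = protect_from_queue (op, 0);		/* read access */
  target = protect_from_queue (target, 1);	/* will be written */
  emit_insn (gen_move_insn (target, op));
}
#endif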
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

queued_subexp_p (rtx x)
  enum rtx_code code = GET_CODE (x);
      return queued_subexp_p (XEXP (x, 0));
      return (queued_subexp_p (XEXP (x, 0))
              || queued_subexp_p (XEXP (x, 1)));

/* Perform all the pending incrementations.  */

  while ((p = pending_chain))
      rtx body = QUEUED_BODY (p);

      switch (GET_CODE (body))
          QUEUED_INSN (p) = body;

#ifdef ENABLE_CHECKING

          QUEUED_INSN (p) = emit_insn (body);

      pending_chain = QUEUED_NEXT (p);
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

convert_move (rtx to, rtx from, int unsignedp)
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
                              : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
          >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
      emit_move_insn (to, from);

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))

      if (VECTOR_MODE_P (to_mode))
        from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
        to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
  if (to_real != from_real)

      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
          /* Try converting directly if the insn is supported.  */
          if ((code = can_extend_p (to_mode, from_mode, 0))
              emit_unop_insn (code, to, from, UNKNOWN);

#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
          emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
#ifdef HAVE_trunctqfqf2
      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
          emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
          emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
          emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
          emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
          emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
          emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
          emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
          emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
          emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
          emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
          emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
          emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
          emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
          emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
          emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
          emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
          emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
          emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
          emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
          libcall = extendsfdf2_libfunc;
          libcall = extendsfxf2_libfunc;
          libcall = extendsftf2_libfunc;
          libcall = truncdfsf2_libfunc;
          libcall = extenddfxf2_libfunc;
          libcall = extenddftf2_libfunc;
          libcall = truncxfsf2_libfunc;
          libcall = truncxfdf2_libfunc;
          libcall = trunctfsf2_libfunc;
          libcall = trunctfdf2_libfunc;

      if (libcall == (rtx) 0)
        /* This conversion is not implemented yet.  */

      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
      insns = get_insns ();
      emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          /* If FROM is a SUBREG, put it into a register.  Do this
             so that we always generate the same set of insns for
             better cse'ing; if an intermediate assignment occurred,
             we won't be doing the operation directly on the SUBREG.  */
          if (optimize > 0 && GET_CODE (from) == SUBREG)
            from = force_reg (from_mode, from);
          emit_unop_insn (code, to, from, equiv_code);
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
               && ((code = can_extend_p (to_mode, word_mode, unsignedp))
                   != CODE_FOR_nothing))
          if (GET_CODE (to) == REG)
            emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
          convert_move (gen_lowpart (word_mode, to), from, unsignedp);
          emit_unop_insn (code, to,
                          gen_lowpart (word_mode, to), equiv_code);

      /* No special multiword conversion insn; do it by hand.  */

      /* Since we will turn this into a no conflict block, we must ensure
         that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
        from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
        lowpart_mode = word_mode;
        lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
        fill_value = const0_rtx;
              && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
              && STORE_FLAG_VALUE == -1)
              emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
              fill_value = gen_reg_rtx (word_mode);
              emit_insn (gen_slt (fill_value));
                = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
                                size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
              fill_value = convert_to_mode (word_mode, fill_value, 1);

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
          int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
          rtx subword = operand_subword (to, index, 1, to_mode);

          if (fill_value != subword)
            emit_move_insn (subword, fill_value);

      insns = get_insns ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
                              gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
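      /* Schematic instance of the above: widening SImode to DImode on a
         32-bit target moves the low word directly; the high word is then
         const0_rtx in the unsigned case, or LOWFROM shifted right
         arithmetically by 31, i.e. a whole word of sign-bit copies, in
         the signed case.  */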
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
      if (!((GET_CODE (from) == MEM
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || GET_CODE (from) == REG
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  if (to_mode == PQImode)
      if (from_mode != QImode)
        from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_truncqipqi2
      if (HAVE_truncqipqi2)
          emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
#endif /* HAVE_truncqipqi2 */

  if (from_mode == PQImode)
      if (to_mode != QImode)
          from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_extendpqiqi2
          if (HAVE_extendpqiqi2)
              emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
#endif /* HAVE_extendpqiqi2 */

  if (to_mode == PSImode)
      if (from_mode != SImode)
        from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
          emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
#endif /* HAVE_truncsipsi2 */

  if (from_mode == PSImode)
      if (to_mode != SImode)
          from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_extendpsisi2
      if (! unsignedp && HAVE_extendpsisi2)
          emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
#endif /* HAVE_extendpsisi2 */
#ifdef HAVE_zero_extendpsisi2
      if (unsignedp && HAVE_zero_extendpsisi2)
          emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
#endif /* HAVE_zero_extendpsisi2 */

  if (to_mode == PDImode)
      if (from_mode != DImode)
        from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
          emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
#endif /* HAVE_truncdipdi2 */

  if (from_mode == PDImode)
      if (to_mode != DImode)
          from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_extendpdidi2
      if (HAVE_extendpdidi2)
          emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
#endif /* HAVE_extendpdidi2 */
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                GET_MODE_BITSIZE (from_mode)))
      if (!((GET_CODE (from) == MEM
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || GET_CODE (from) == REG
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
          && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
        from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
          from = force_not_mem (from);
          emit_unop_insn (code, to, from, equiv_code);

          enum machine_mode intermediate;

          /* Search for a mode to convert via.  */
          for (intermediate = from_mode; intermediate != VOIDmode;
               intermediate = GET_MODE_WIDER_MODE (intermediate))
            if (((can_extend_p (to_mode, intermediate, unsignedp)
                  != CODE_FOR_nothing)
                 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
                     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                               GET_MODE_BITSIZE (intermediate))))
                && (can_extend_p (intermediate, from_mode, unsignedp)
                    != CODE_FOR_nothing))
                convert_move (to, convert_to_mode (intermediate, from,
                                                   unsignedp), unsignedp);

          /* No suitable intermediate mode.
             Generate what we need with shifts.  */
          shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
                                      - GET_MODE_BITSIZE (from_mode), 0);
          from = gen_lowpart (to_mode, force_reg (from_mode, from));
          tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
          tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
          emit_move_insn (to, tmp);
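          /* Schematically, on a hypothetical target with no usable extend
             pattern, QImode -> SImode becomes a shift pair in SImode:

               tmp = from << 24;
               to  = tmp >> 24;

             where the right shift is arithmetic for sign extension and
             logical when UNSIGNEDP requests zero extension.  */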
  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
          emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == DImode && to_mode == HImode)
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
          emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == DImode && to_mode == QImode)
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
          emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == SImode && to_mode == HImode)
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
          emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == SImode && to_mode == QImode)
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
          emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == HImode && to_mode == QImode)
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
          emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == TImode && to_mode == DImode)
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
          emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == TImode && to_mode == SImode)
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
          emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == TImode && to_mode == HImode)
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
          emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == TImode && to_mode == QImode)
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
          emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);

  /* Mode combination is not recognized.  */
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
  return convert_modes (mode, VOIDmode, x, unsignedp);
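/* A typical call, shown schematically (OP is a hypothetical rtx):

     op = convert_to_mode (SImode, op, 1);

   This either refers to a low part of OP in place, when the truncation
   is trivial, or copies OP into a fresh SImode pseudo with a real
   conversion, treating OP as unsigned.  */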
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do the
     wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
          && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
          int width = GET_MODE_BITSIZE (oldmode);

          /* We need to zero extend VAL.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
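  /* Worked example: with a 32-bit HOST_WIDE_INT, converting the HImode
     constant (const_int -1) to an unsigned 64-bit mode must produce
     0x000000000000ffff, not all ones; VAL is masked down to 0xffff
     above and immed_double_const then supplies a zero high word.  */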
  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
          && GET_MODE_CLASS (oldmode) == MODE_INT
          && (GET_CODE (x) == CONST_DOUBLE
              || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
                  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
                       && direct_load[(int) mode])
                      || (GET_CODE (x) == REG
                          && (! HARD_REGISTER_P (x)
                              || HARD_REGNO_MODE_OK (REGNO (x), mode))
                          && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
                                                    GET_MODE_BITSIZE (GET_MODE (x)))))))))
      /* ??? If we don't know OLDMODE, we have to assume here that
         X does not need sign- or zero-extension.  This may not be
         the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
          HOST_WIDE_INT val = INTVAL (x);
          int width = GET_MODE_BITSIZE (oldmode);

          /* We must sign or zero-extend in this case.  Start by
             zero-extending, then sign extend if we need to.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
          if (! unsignedp
              && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
            val |= (HOST_WIDE_INT) (-1) << width;

          return gen_int_mode (val, mode);

      return gen_lowpart (mode, x);

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))

/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

can_move_by_pieces (unsigned HOST_WIDE_INT len,
                    unsigned int align ATTRIBUTE_UNUSED)
  return MOVE_BY_PIECES_P (len, align);

/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).  The caller must pass FROM
   and TO through protect_from_queue before calling.

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return TO, if ENDP is 1 return the memory at the end ala
   mempcpy, and if ENDP is 2 return the memory at the end minus one byte ala
   stpcpy.  */
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
                unsigned int align, int endp)
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.from_addr = from_addr;
      to_addr = XEXP (to, 0);
        = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
           || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
        = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
#ifdef STACK_GROWS_DOWNWARD
  data.to_addr = to_addr;
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
          data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
          data.autinc_from = 1;
          data.explicit_inc_from = -1;
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
          data.from_addr = copy_addr_to_reg (from_addr);
          data.autinc_from = 1;
          data.explicit_inc_from = 1;
      if (!data.autinc_from && CONSTANT_P (from_addr))
        data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
          data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
          data.explicit_inc_to = -1;
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
          data.to_addr = copy_addr_to_reg (to_addr);
          data.explicit_inc_to = 1;
      if (!data.autinc_to && CONSTANT_P (to_addr))
        data.to_addr = copy_addr_to_reg (to_addr);

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)

      if (mode == VOIDmode)

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);

  /* The code above should have handled everything.  */

      if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
        data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
      to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
        to1 = adjust_address (data.to, QImode, data.offset);
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align)
  unsigned HOST_WIDE_INT n_insns = 0;
  unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  while (max_size > 1)
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)

      if (mode == VOIDmode)

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
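  /* E.g., with MOVE_MAX == 4 and 32-bit alignment, L == 11 counts as
     2 SImode moves + 1 HImode move + 1 QImode move, so the result
     is 4.  */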
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
                  struct move_by_pieces *data)
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
        data->offset -= size;

          if (data->autinc_to)
            to1 = adjust_automodify_address (data->to, mode, data->to_addr,
            to1 = adjust_address (data->to, mode, data->offset);

      if (data->autinc_from)
        from1 = adjust_automodify_address (data->from, mode, data->from_addr,
        from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  GEN_INT (-(HOST_WIDE_INT) size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
        emit_insn (gen_add2_insn (data->from_addr,
                                  GEN_INT (-(HOST_WIDE_INT) size)));

        emit_insn ((*genfun) (to1, from1));
#ifdef PUSH_ROUNDING
        emit_single_push_insn (mode, from1, NULL);

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
        data->offset += size;
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
    case BLOCK_OP_NORMAL:
      may_use_call = true;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
         to force it to pop the arguments right away.  */

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;

  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  if (GET_MODE (x) != BLKmode)
  if (GET_MODE (y) != BLKmode)

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
  if (GET_CODE (y) != MEM)

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is when the size comes from __builtin_memcpy.  */
  if (GET_CODE (size) == CONST_INT)
      if (INTVAL (size) == 0)

      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, size);
      set_mem_size (y, size);

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movstr (x, y, size, align))
  else if (may_use_call)
    retval = emit_block_move_via_libcall (x, y, size);
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
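  /* A caller's view, schematically (X, Y and NBYTES hypothetical):

       retval = emit_block_move (x, y, GEN_INT (nbytes), BLOCK_OP_NORMAL);

     The strategies above are tried in order: the inline move_by_pieces
     expansion, a target movstr pattern, the memcpy/bcopy libcall when
     MAY_USE_CALL allows it, and finally the explicit byte loop.  */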
/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

block_move_libcall_safe_for_call_parm (void)
  /* Check to see whether memcpy takes all register arguments.  */
    takes_regs_uninit, takes_regs_no, takes_regs_yes
  } takes_regs = takes_regs_uninit;

    case takes_regs_uninit:
        CUMULATIVE_ARGS args_so_far;

        fn = emit_block_move_libcall_fn (false);
        INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0);

        arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
        for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
            enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
            rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
            if (!tmp || !REG_P (tmp))
              goto fail_takes_regs;
#ifdef FUNCTION_ARG_PARTIAL_NREGS
            if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
              goto fail_takes_regs;
            FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
        takes_regs = takes_regs_yes;

    case takes_regs_yes:

      takes_regs = takes_regs_no;
/* A subroutine of emit_block_move.  Expand a movstr pattern;
   return true if successful.  */

emit_block_move_via_movstr (rtx x, rtx y, rtx size, unsigned int align)
  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  enum machine_mode mode;

  /* Since this is a move insn, we don't care about volatility.  */

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
      enum insn_code code = movstr_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
          /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
             here because if SIZE is less than the mode mask, as it is
             returned by the macro, it will definitely be less than the
             actual mode mask.  */
          && ((GET_CODE (size) == CONST_INT
               && ((unsigned HOST_WIDE_INT) INTVAL (size)
                   <= (GET_MODE_MASK (mode) >> 1)))
              || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
          && ((pred = insn_data[(int) code].operand[0].predicate) == 0
              || (*pred) (x, BLKmode))
          && ((pred = insn_data[(int) code].operand[1].predicate) == 0
              || (*pred) (y, BLKmode))
          && ((pred = insn_data[(int) code].operand[3].predicate) == 0
              || (*pred) (opalign, VOIDmode)))
          rtx last = get_last_insn ();

          op2 = convert_to_mode (mode, size, 1);
          pred = insn_data[(int) code].operand[2].predicate;
          if (pred != 0 && ! (*pred) (op2, mode))
            op2 = copy_to_mode_reg (mode, op2);

          /* ??? When called via emit_block_move_for_call, it'd be
             nice if there were some way to inform the backend, so
             that it doesn't fail the expansion because it thinks
             emitting the libcall would be more efficient.  */

          pat = GEN_FCN ((int) code) (x, y, op2, opalign);

          delete_insns_since (last);
/* A subroutine of emit_block_move.  Expand a call to memcpy or bcopy.
   Return the return value from memcpy, 0 otherwise.  */

emit_block_move_via_libcall (rtx dst, rtx src, rtx size)
  rtx dst_addr, src_addr;
  tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;

  /* DST, SRC, or SIZE may have been passed through protect_from_queue.

     It is unsafe to save the value generated by protect_from_queue and reuse
     it later.  Consider what happens if emit_queue is called before the
     return value from protect_from_queue is used.

     Expansion of the CALL_EXPR below will call emit_queue before we are
     finished emitting RTL for argument setup.  So if we are not careful we
     could get the wrong value for an argument.

     To avoid this problem we go ahead and emit code to copy the addresses of
     DST and SRC and SIZE into new pseudos.  We can then place those new
     pseudos into an RTL_EXPR and use them later, even after a call to
     emit_queue.

     Note this is not strictly needed for library calls since they do not call
     emit_queue before loading their arguments.  However, we may need to have
     library calls call emit_queue in the future since failing to do so could
     cause problems for targets which define SMALL_REGISTER_CLASSES and pass
     arguments in registers.  */
  dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));

#ifdef POINTERS_EXTEND_UNSIGNED
  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  if (TARGET_MEM_FUNCTIONS)
    size_mode = TYPE_MODE (sizetype);
    size_mode = TYPE_MODE (unsigned_type_node);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.

     For convenience, we generate the call to bcopy this way as well.  */

  if (TARGET_MEM_FUNCTIONS)
    size_tree = make_tree (sizetype, size);
    size_tree = make_tree (unsigned_type_node, size);

  fn = emit_block_move_libcall_fn (true);
  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
  if (TARGET_MEM_FUNCTIONS)
      arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
      arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);

      arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
      arg_list = tree_cons (NULL_TREE, src_tree, arg_list);

  /* Now we have to build up the CALL_EXPR itself.  */
  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
  call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
                     call_expr, arg_list, NULL_TREE);
  TREE_SIDE_EFFECTS (call_expr) = 1;

  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
  /* If we are initializing a readonly value, show the above call clobbered
     it.  Otherwise, a load from it may erroneously be hoisted from a loop, or
     the delay slot scheduler might overlook conflicts and take nasty
     decisions.  */
  if (RTX_UNCHANGING_P (dst))
    add_function_usage_to
      (last_call_insn (), gen_rtx_EXPR_LIST (VOIDmode,
                                             gen_rtx_CLOBBER (VOIDmode, dst),

  return TARGET_MEM_FUNCTIONS ? retval : NULL_RTX;
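/* At the C level the calls built above are, respectively,

     memcpy (dst, src, size);    with TARGET_MEM_FUNCTIONS
     bcopy (src, dst, size);     otherwise

   Note the swapped pointer operands; that is why ARG_LIST is consed
   in two different orders above.  */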
/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_move_fn;

init_block_move_fn (const char *asmspec)
      if (TARGET_MEM_FUNCTIONS)
          fn = get_identifier ("memcpy");
          args = build_function_type_list (ptr_type_node, ptr_type_node,
                                           const_ptr_type_node, sizetype,

          fn = get_identifier ("bcopy");
          args = build_function_type_list (void_type_node, const_ptr_type_node,
                                           ptr_type_node, unsigned_type_node,

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;

      SET_DECL_RTL (block_move_fn, NULL_RTX);
      SET_DECL_ASSEMBLER_NAME (block_move_fn, get_identifier (asmspec));
emit_block_move_libcall_fn (int for_call)
  static bool emitted_extern;

    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
      emitted_extern = true;
      make_decl_rtl (block_move_fn, NULL);
      assemble_external (block_move_fn);

  return block_move_fn;
/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

emit_block_move_via_loop (rtx x, rtx y, rtx size,
                          unsigned int align ATTRIBUTE_UNUSED)
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_note (NOTE_INSN_LOOP_BEG);

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (Pmode, iter_mode, iter, true);
  x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
  y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
                             true, OPTAB_LIB_WIDEN);

  emit_move_insn (iter, tmp);

  emit_note (NOTE_INSN_LOOP_CONT);
  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,

  emit_note (NOTE_INSN_LOOP_END);
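  /* The RTL emitted above amounts to this C sketch, with the comparison
     at the bottom so that a zero SIZE skips the body entirely:

       for (iter = 0; iter < size; iter++)
         ((char *) x)[iter] = ((char *) y)[iter];  */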
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
#ifdef HAVE_load_multiple

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
        delete_insns_since (last);

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
                    operand_subword_force (x, i, mode));

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

move_block_from_reg (int regno, rtx x, int nregs)

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
      rtx last = get_last_insn ();
      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
        delete_insns_since (last);

  for (i = 0; i < nregs; i++)
      rtx tem = operand_subword (x, i, 1, BLKmode);

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

gen_group_rtx (rtx orig)
  if (GET_CODE (orig) != PARALLEL)

  length = XVECLEN (orig, 0);
  tmps = (rtx *) alloca (sizeof (rtx) * length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  for (; i < length; i++)
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
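/* Shape of such a group, for a hypothetical value spread over two
   DImode registers:

     (parallel [(expr_list (reg:DI 100) (const_int 0))
                (expr_list (reg:DI 101) (const_int 8))])

   Each element pairs a register with its byte offset into the value;
   gen_group_rtx above replaces each register with a fresh pseudo of
   the same mode while keeping the offsets.  */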
2246 /* Emit code to move a block ORIG_SRC of type TYPE to a block DST,
2247 where DST is non-consecutive registers represented by a PARALLEL.
2248 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
2252 emit_group_load (rtx dst, rtx orig_src, tree type ATTRIBUTE_UNUSED, int ssize)
2257 if (GET_CODE (dst) != PARALLEL)
2260 /* Check for a NULL entry, used to indicate that the parameter goes
2261 both on the stack and in registers. */
2262 if (XEXP (XVECEXP (dst, 0, 0), 0))
2267 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
2269 /* Process the pieces. */
2270 for (i = start; i < XVECLEN (dst, 0); i++)
2272 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
2273 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
2274 unsigned int bytelen = GET_MODE_SIZE (mode);
2277 /* Handle trailing fragments that run over the size of the struct. */
2278 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2280 /* Arrange to shift the fragment to where it belongs.
2281 extract_bit_field loads to the lsb of the reg. */
2283 #ifdef BLOCK_REG_PADDING
2284 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
2285 == (BYTES_BIG_ENDIAN ? upward : downward)
2290 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2291 bytelen = ssize - bytepos;
2296 /* If we won't be loading directly from memory, protect the real source
2297 from strange tricks we might play; but make sure that the source can
2298 be loaded directly into the destination. */
2300 if (GET_CODE (orig_src) != MEM
2301 && (!CONSTANT_P (orig_src)
2302 || (GET_MODE (orig_src) != mode
2303 && GET_MODE (orig_src) != VOIDmode)))
2305 if (GET_MODE (orig_src) == VOIDmode)
2306 src = gen_reg_rtx (mode);
2308 src = gen_reg_rtx (GET_MODE (orig_src));
2310 emit_move_insn (src, orig_src);
2313 /* Optimize the access just a bit. */
2314 if (GET_CODE (src) == MEM
2315 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
2316 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
2317 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2318 && bytelen == GET_MODE_SIZE (mode))
2320 tmps[i] = gen_reg_rtx (mode);
2321 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
2323 else if (GET_CODE (src) == CONCAT)
2325 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
2326 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
2328 if ((bytepos == 0 && bytelen == slen0)
2329 || (bytepos != 0 && bytepos + bytelen <= slen))
2331 /* The following assumes that the concatenated objects all
2332 have the same size. In this case, a simple calculation
2333 can be used to determine the object and the bit field
2334 to be extracted. */
2335 tmps[i] = XEXP (src, bytepos / slen0);
2336 if (! CONSTANT_P (tmps[i])
2337 && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
2338 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
2339 (bytepos % slen0) * BITS_PER_UNIT,
2340 1, NULL_RTX, mode, mode, ssize);
2342 else if (bytepos == 0)
2344 rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
2345 emit_move_insn (mem, src);
2346 tmps[i] = adjust_address (mem, mode, 0);
2351 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
2352 SIMD register, which is currently broken. While we get GCC
2353 to emit proper RTL for these cases, let's dump to memory. */
2354 else if (VECTOR_MODE_P (GET_MODE (dst))
2355 && GET_CODE (src) == REG)
2357 int slen = GET_MODE_SIZE (GET_MODE (src));
2360 mem = assign_stack_temp (GET_MODE (src), slen, 0);
2361 emit_move_insn (mem, src);
2362 tmps[i] = adjust_address (mem, mode, (int) bytepos);
2364 else if (CONSTANT_P (src)
2365 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2368 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2369 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2373 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2374 tmps[i], 0, OPTAB_WIDEN);
2379 /* Copy the extracted pieces into the proper (probable) hard regs. */
2380 for (i = start; i < XVECLEN (dst, 0); i++)
2381 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
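
/* Illustrative use (under #if 0, names invented): scatter a 16-byte
   BLKmode object in memory into the two-register PARALLEL sketched
   after gen_group_rtx above.  */
#if 0
emit_group_load (group, src_mem, NULL_TREE, 16);
#endif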
2384 /* Emit code to move a block SRC to block DST, where SRC and DST are
2385 non-consecutive groups of registers, each represented by a PARALLEL. */
2388 emit_group_move (rtx dst, rtx src)
2392 if (GET_CODE (src) != PARALLEL
2393 || GET_CODE (dst) != PARALLEL
2394 || XVECLEN (src, 0) != XVECLEN (dst, 0))
2397 /* Skip first entry if NULL. */
2398 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
2399 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
2400 XEXP (XVECEXP (src, 0, i), 0));
2403 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
2404 where SRC is non-consecutive registers represented by a PARALLEL.
2405 SSIZE represents the total size of block ORIG_DST, or -1 if not
2406 known. */
2409 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
2414 if (GET_CODE (src) != PARALLEL)
2417 /* Check for a NULL entry, used to indicate that the parameter goes
2418 both on the stack and in registers. */
2419 if (XEXP (XVECEXP (src, 0, 0), 0))
2424 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2426 /* Copy the (probable) hard regs into pseudos. */
2427 for (i = start; i < XVECLEN (src, 0); i++)
2429 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2430 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2431 emit_move_insn (tmps[i], reg);
2435 /* If we won't be storing directly into memory, protect the real destination
2436 from strange tricks we might play. */
2438 if (GET_CODE (dst) == PARALLEL)
2442 /* We can get a PARALLEL dst if there is a conditional expression in
2443 a return statement. In that case, the dst and src are the same,
2444 so no action is necessary. */
2445 if (rtx_equal_p (dst, src))
2448 /* It is unclear if we can ever reach here, but we may as well handle
2449 it. Allocate a temporary, and split this into a store/load to/from
2450 the temporary. */
2452 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2453 emit_group_store (temp, src, type, ssize);
2454 emit_group_load (dst, temp, type, ssize);
2457 else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
2459 dst = gen_reg_rtx (GET_MODE (orig_dst));
2460 /* Make life a bit easier for combine. */
2461 emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
2464 /* Process the pieces. */
2465 for (i = start; i < XVECLEN (src, 0); i++)
2467 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2468 enum machine_mode mode = GET_MODE (tmps[i]);
2469 unsigned int bytelen = GET_MODE_SIZE (mode);
2472 /* Handle trailing fragments that run over the size of the struct. */
2473 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2475 /* store_bit_field always takes its value from the lsb.
2476 Move the fragment to the lsb if it's not already there. */
2478 #ifdef BLOCK_REG_PADDING
2479 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2480 == (BYTES_BIG_ENDIAN ? upward : downward)
2486 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2487 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2488 tmps[i], 0, OPTAB_WIDEN);
2490 bytelen = ssize - bytepos;
2493 if (GET_CODE (dst) == CONCAT)
2495 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2496 dest = XEXP (dst, 0);
2497 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2499 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2500 dest = XEXP (dst, 1);
2502 else if (bytepos == 0 && XVECLEN (src, 0))
2504 dest = assign_stack_temp (GET_MODE (dest),
2505 GET_MODE_SIZE (GET_MODE (dest)), 0);
2506 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2515 /* Optimize the access just a bit. */
2516 if (GET_CODE (dest) == MEM
2517 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2518 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2519 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2520 && bytelen == GET_MODE_SIZE (mode))
2521 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2523 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2524 mode, tmps[i], ssize);
2529 /* Copy from the pseudo into the (probable) hard reg. */
2530 if (orig_dst != dst)
2531 emit_move_insn (orig_dst, dst);
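
/* Illustrative inverse of emit_group_load (under #if 0, names
   invented): gather the group sketched earlier back into a 16-byte
   stack temporary.  */
#if 0
{
  rtx mem = assign_stack_temp (BLKmode, 16, 0);
  emit_group_store (mem, group, NULL_TREE, 16);
}
#endif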
2534 /* Generate code to copy a BLKmode object of TYPE out of a
2535 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2536 is null, a stack temporary is created. TGTBLK is returned.
2538 The primary purpose of this routine is to handle functions
2539 that return BLKmode structures in registers. Some machines
2540 (the PA for example) want to return all small structures
2541 in registers regardless of the structure's alignment. */
2544 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2546 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2547 rtx src = NULL, dst = NULL;
2548 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2549 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2553 tgtblk = assign_temp (build_qualified_type (type,
2555 | TYPE_QUAL_CONST)),
2557 preserve_temp_slots (tgtblk);
2560 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2561 into a new pseudo which is a full word. */
2563 if (GET_MODE (srcreg) != BLKmode
2564 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2565 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2567 /* Structures whose size is not a multiple of a word are aligned
2568 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2569 machine, this means we must skip the empty high order bytes when
2570 calculating the bit offset. */
2571 if (BYTES_BIG_ENDIAN
2572 && bytes % UNITS_PER_WORD)
2573 big_endian_correction
2574 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2576 /* Copy the structure BITSIZE bits at a time.
2578 We could probably emit more efficient code for machines which do not use
2579 strict alignment, but it doesn't seem worth the effort at the current
2580 time. */
2581 for (bitpos = 0, xbitpos = big_endian_correction;
2582 bitpos < bytes * BITS_PER_UNIT;
2583 bitpos += bitsize, xbitpos += bitsize)
2585 /* We need a new source operand each time xbitpos is on a
2586 word boundary and when xbitpos == big_endian_correction
2587 (the first time through). */
2588 if (xbitpos % BITS_PER_WORD == 0
2589 || xbitpos == big_endian_correction)
2590 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2593 /* We need a new destination operand each time bitpos is on
2594 a word boundary. */
2595 if (bitpos % BITS_PER_WORD == 0)
2596 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2598 /* Use xbitpos for the source extraction (right justified) and
2599 bitpos for the destination store (left justified). */
2600 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2601 extract_bit_field (src, bitsize,
2602 xbitpos % BITS_PER_WORD, 1,
2603 NULL_RTX, word_mode, word_mode,
2611 /* Add a USE expression for REG to the (possibly empty) list pointed
2612 to by CALL_FUSAGE. REG must denote a hard register. */
2615 use_reg (rtx *call_fusage, rtx reg)
2617 if (GET_CODE (reg) != REG
2618 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2622 = gen_rtx_EXPR_LIST (VOIDmode,
2623 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2626 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2627 starting at REGNO. All of these registers must be hard registers. */
2630 use_regs (rtx *call_fusage, int regno, int nregs)
2634 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2637 for (i = 0; i < nregs; i++)
2638 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2641 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2642 PARALLEL REGS. This is for calls that pass values in multiple
2643 non-contiguous locations. The Irix 6 ABI has examples of this. */
2646 use_group_regs (rtx *call_fusage, rtx regs)
2650 for (i = 0; i < XVECLEN (regs, 0); i++)
2652 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2654 /* A NULL entry means the parameter goes both on the stack and in
2655 registers. This can also be a MEM for targets that pass values
2656 partially on the stack and partially in registers. */
2657 if (reg != 0 && GET_CODE (reg) == REG)
2658 use_reg (call_fusage, reg);
2663 /* Determine whether the LEN bytes generated by CONSTFUN can be
2664 stored to memory using several move instructions. CONSTFUNDATA is
2665 a pointer which will be passed as argument in every CONSTFUN call.
2666 ALIGN is maximum alignment we can assume. Return nonzero if a
2667 call to store_by_pieces should succeed. */
2670 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2671 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2672 void *constfundata, unsigned int align)
2674 unsigned HOST_WIDE_INT max_size, l;
2675 HOST_WIDE_INT offset = 0;
2676 enum machine_mode mode, tmode;
2677 enum insn_code icode;
2684 if (! STORE_BY_PIECES_P (len, align))
2687 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2688 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2689 align = MOVE_MAX * BITS_PER_UNIT;
2691 /* We would first store what we can in the largest integer mode, then go to
2692 successively smaller modes. */
2695 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2700 max_size = STORE_MAX_PIECES + 1;
2701 while (max_size > 1)
2703 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2704 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2705 if (GET_MODE_SIZE (tmode) < max_size)
2708 if (mode == VOIDmode)
2711 icode = mov_optab->handlers[(int) mode].insn_code;
2712 if (icode != CODE_FOR_nothing
2713 && align >= GET_MODE_ALIGNMENT (mode))
2715 unsigned int size = GET_MODE_SIZE (mode);
2722 cst = (*constfun) (constfundata, offset, mode);
2723 if (!LEGITIMATE_CONSTANT_P (cst))
2733 max_size = GET_MODE_SIZE (mode);
2736 /* The code above should have handled everything. */
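
/* A minimal CONSTFUN sketch (under #if 0, purely illustrative):
   ignore the offset and return MODE filled with one repeated byte,
   the byte value being passed through CONSTFUNDATA.  */
#if 0
static rtx
repeated_byte_constfun (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
			enum machine_mode mode)
{
  unsigned HOST_WIDE_INT byte = *(unsigned char *) data;
  /* 0x0101...01 * BYTE replicates BYTE into every byte position.  */
  unsigned HOST_WIDE_INT val = byte * (~(unsigned HOST_WIDE_INT) 0 / 0xff);
  return gen_int_mode (val, mode);
}
#endif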
2744 /* Generate several move instructions to store LEN bytes generated by
2745 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2746 pointer which will be passed as argument in every CONSTFUN call.
2747 ALIGN is maximum alignment we can assume.
2748 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
2749 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
2750 stpcpy. */
2753 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2754 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2755 void *constfundata, unsigned int align, int endp)
2757 struct store_by_pieces data;
2766 if (! STORE_BY_PIECES_P (len, align))
2768 to = protect_from_queue (to, 1);
2769 data.constfun = constfun;
2770 data.constfundata = constfundata;
2773 store_by_pieces_1 (&data, align);
2784 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2785 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2787 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2790 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2797 to1 = adjust_address (data.to, QImode, data.offset);
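
/* Illustrative pairing (under #if 0) with the CONSTFUN sketched
   above: only call store_by_pieces once can_store_by_pieces has
   agreed the whole length is storable.  */
#if 0
{
  unsigned char byte = 0xab;
  if (can_store_by_pieces (len, repeated_byte_constfun, &byte, align))
    store_by_pieces (to, len, repeated_byte_constfun, &byte, align, 0);
}
#endif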
2805 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2806 rtx with BLKmode). The caller must pass TO through protect_from_queue
2807 before calling. ALIGN is maximum alignment we can assume. */
2810 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2812 struct store_by_pieces data;
2817 data.constfun = clear_by_pieces_1;
2818 data.constfundata = NULL;
2821 store_by_pieces_1 (&data, align);
2824 /* Callback routine for clear_by_pieces.
2825 Return const0_rtx unconditionally. */
2828 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2829 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2830 enum machine_mode mode ATTRIBUTE_UNUSED)
2835 /* Subroutine of clear_by_pieces and store_by_pieces.
2836 Generate several move instructions to store LEN bytes of block TO. (A MEM
2837 rtx with BLKmode). The caller must pass TO through protect_from_queue
2838 before calling. ALIGN is maximum alignment we can assume. */
2841 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2842 unsigned int align ATTRIBUTE_UNUSED)
2844 rtx to_addr = XEXP (data->to, 0);
2845 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
2846 enum machine_mode mode = VOIDmode, tmode;
2847 enum insn_code icode;
2850 data->to_addr = to_addr;
2852 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2853 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2855 data->explicit_inc_to = 0;
2857 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2859 data->offset = data->len;
2861 /* If storing requires more than two move insns,
2862 copy addresses to registers (to make displacements shorter)
2863 and use post-increment if available. */
2864 if (!data->autinc_to
2865 && move_by_pieces_ninsns (data->len, align) > 2)
2867 /* Determine the main mode we'll be using. */
2868 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2869 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2870 if (GET_MODE_SIZE (tmode) < max_size)
2873 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2875 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2876 data->autinc_to = 1;
2877 data->explicit_inc_to = -1;
2880 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2881 && ! data->autinc_to)
2883 data->to_addr = copy_addr_to_reg (to_addr);
2884 data->autinc_to = 1;
2885 data->explicit_inc_to = 1;
2888 if ( !data->autinc_to && CONSTANT_P (to_addr))
2889 data->to_addr = copy_addr_to_reg (to_addr);
2892 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2893 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2894 align = MOVE_MAX * BITS_PER_UNIT;
2896 /* First store what we can in the largest integer mode, then go to
2897 successively smaller modes. */
2899 while (max_size > 1)
2901 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2902 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2903 if (GET_MODE_SIZE (tmode) < max_size)
2906 if (mode == VOIDmode)
2909 icode = mov_optab->handlers[(int) mode].insn_code;
2910 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2911 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2913 max_size = GET_MODE_SIZE (mode);
2916 /* The code above should have handled everything. */
2921 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2922 with move instructions for mode MODE. GENFUN is the gen_... function
2923 to make a move insn for that mode. DATA has all the other info. */
2926 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2927 struct store_by_pieces *data)
2929 unsigned int size = GET_MODE_SIZE (mode);
2932 while (data->len >= size)
2935 data->offset -= size;
2937 if (data->autinc_to)
2938 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2941 to1 = adjust_address (data->to, mode, data->offset);
2943 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2944 emit_insn (gen_add2_insn (data->to_addr,
2945 GEN_INT (-(HOST_WIDE_INT) size)));
2947 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2948 emit_insn ((*genfun) (to1, cst));
2950 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2951 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2953 if (! data->reverse)
2954 data->offset += size;
2960 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2961 its length in bytes. */
2964 clear_storage (rtx object, rtx size)
2967 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2968 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2970 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2971 just move a zero. Otherwise, do this a piece at a time. */
2972 if (GET_MODE (object) != BLKmode
2973 && GET_CODE (size) == CONST_INT
2974 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
2975 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2978 object = protect_from_queue (object, 1);
2979 size = protect_from_queue (size, 0);
2981 if (size == const0_rtx)
2983 else if (GET_CODE (size) == CONST_INT
2984 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2985 clear_by_pieces (object, INTVAL (size), align);
2986 else if (clear_storage_via_clrstr (object, size, align))
2989 retval = clear_storage_via_libcall (object, size);
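
/* Illustrative call (under #if 0, MEM invented): zero a 64-byte
   BLKmode object, letting clear_storage choose between by-pieces
   stores, a clrstr pattern, and the library call.  */
#if 0
clear_storage (mem, GEN_INT (64));
#endif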
2995 /* A subroutine of clear_storage. Expand a clrstr pattern;
2996 return true if successful. */
2999 clear_storage_via_clrstr (rtx object, rtx size, unsigned int align)
3001 /* Try the most limited insn first, because there's no point
3002 including more than one in the machine description unless
3003 the more limited one has some advantage. */
3005 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3006 enum machine_mode mode;
3008 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
3009 mode = GET_MODE_WIDER_MODE (mode))
3011 enum insn_code code = clrstr_optab[(int) mode];
3012 insn_operand_predicate_fn pred;
3014 if (code != CODE_FOR_nothing
3015 /* We don't need MODE to be narrower than
3016 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
3017 the mode mask, as it is returned by the macro, it will
3018 definitely be less than the actual mode mask. */
3019 && ((GET_CODE (size) == CONST_INT
3020 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3021 <= (GET_MODE_MASK (mode) >> 1)))
3022 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3023 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
3024 || (*pred) (object, BLKmode))
3025 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
3026 || (*pred) (opalign, VOIDmode)))
3029 rtx last = get_last_insn ();
3032 op1 = convert_to_mode (mode, size, 1);
3033 pred = insn_data[(int) code].operand[1].predicate;
3034 if (pred != 0 && ! (*pred) (op1, mode))
3035 op1 = copy_to_mode_reg (mode, op1);
3037 pat = GEN_FCN ((int) code) (object, op1, opalign);
3044 delete_insns_since (last);
3051 /* A subroutine of clear_storage. Expand a call to memset or bzero.
3052 Return the return value of memset, 0 otherwise. */
3055 clear_storage_via_libcall (rtx object, rtx size)
3057 tree call_expr, arg_list, fn, object_tree, size_tree;
3058 enum machine_mode size_mode;
3061 /* OBJECT or SIZE may have been passed through protect_from_queue.
3063 It is unsafe to save the value generated by protect_from_queue
3064 and reuse it later. Consider what happens if emit_queue is
3065 called before the return value from protect_from_queue is used.
3067 Expansion of the CALL_EXPR below will call emit_queue before
3068 we are finished emitting RTL for argument setup. So if we are
3069 not careful we could get the wrong value for an argument.
3071 To avoid this problem we go ahead and emit code to copy OBJECT
3072 and SIZE into new pseudos. We can then place those new pseudos
3073 into an RTL_EXPR and use them later, even after a call to
3074 emit_queue.
3076 Note this is not strictly needed for library calls since they
3077 do not call emit_queue before loading their arguments. However,
3078 we may need to have library calls call emit_queue in the future
3079 since failing to do so could cause problems for targets which
3080 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
3082 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
3084 if (TARGET_MEM_FUNCTIONS)
3085 size_mode = TYPE_MODE (sizetype);
3087 size_mode = TYPE_MODE (unsigned_type_node);
3088 size = convert_to_mode (size_mode, size, 1);
3089 size = copy_to_mode_reg (size_mode, size);
3091 /* It is incorrect to use the libcall calling conventions to call
3092 memset in this context. This could be a user call to memset and
3093 the user may wish to examine the return value from memset. For
3094 targets where libcalls and normal calls have different conventions
3095 for returning pointers, we could end up generating incorrect code.
3097 For convenience, we generate the call to bzero this way as well. */
3099 object_tree = make_tree (ptr_type_node, object);
3100 if (TARGET_MEM_FUNCTIONS)
3101 size_tree = make_tree (sizetype, size);
3103 size_tree = make_tree (unsigned_type_node, size);
3105 fn = clear_storage_libcall_fn (true);
3106 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
3107 if (TARGET_MEM_FUNCTIONS)
3108 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
3109 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
3111 /* Now we have to build up the CALL_EXPR itself. */
3112 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
3113 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
3114 call_expr, arg_list, NULL_TREE);
3115 TREE_SIDE_EFFECTS (call_expr) = 1;
3117 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
3119 /* If we are initializing a readonly value, show the above call
3120 clobbered it. Otherwise, a load from it may erroneously be
3121 hoisted from a loop. */
3122 if (RTX_UNCHANGING_P (object))
3123 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
3125 return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
3128 /* A subroutine of clear_storage_via_libcall. Create the tree node
3129 for the function we use for block clears. The first time FOR_CALL
3130 is true, we call assemble_external. */
3132 static GTY(()) tree block_clear_fn;
3135 init_block_clear_fn (const char *asmspec)
3137 if (!block_clear_fn)
3141 if (TARGET_MEM_FUNCTIONS)
3143 fn = get_identifier ("memset");
3144 args = build_function_type_list (ptr_type_node, ptr_type_node,
3145 integer_type_node, sizetype,
3150 fn = get_identifier ("bzero");
3151 args = build_function_type_list (void_type_node, ptr_type_node,
3152 unsigned_type_node, NULL_TREE);
3155 fn = build_decl (FUNCTION_DECL, fn, args);
3156 DECL_EXTERNAL (fn) = 1;
3157 TREE_PUBLIC (fn) = 1;
3158 DECL_ARTIFICIAL (fn) = 1;
3159 TREE_NOTHROW (fn) = 1;
3161 block_clear_fn = fn;
3166 SET_DECL_RTL (block_clear_fn, NULL_RTX);
3167 SET_DECL_ASSEMBLER_NAME (block_clear_fn, get_identifier (asmspec));
3172 clear_storage_libcall_fn (int for_call)
3174 static bool emitted_extern;
3176 if (!block_clear_fn)
3177 init_block_clear_fn (NULL);
3179 if (for_call && !emitted_extern)
3181 emitted_extern = true;
3182 make_decl_rtl (block_clear_fn, NULL);
3183 assemble_external (block_clear_fn);
3186 return block_clear_fn;
3189 /* Generate code to copy Y into X.
3190 Both Y and X must have the same mode, except that
3191 Y can be a constant with VOIDmode.
3192 This mode cannot be BLKmode; use emit_block_move for that.
3194 Return the last instruction emitted. */
3197 emit_move_insn (rtx x, rtx y)
3199 enum machine_mode mode = GET_MODE (x);
3200 rtx y_cst = NULL_RTX;
3203 x = protect_from_queue (x, 1);
3204 y = protect_from_queue (y, 0);
3206 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
3209 /* Never force constant_p_rtx to memory. */
3210 if (GET_CODE (y) == CONSTANT_P_RTX)
3212 else if (CONSTANT_P (y))
3215 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3216 && (last_insn = compress_float_constant (x, y)))
3221 if (!LEGITIMATE_CONSTANT_P (y))
3223 y = force_const_mem (mode, y);
3225 /* If the target's cannot_force_const_mem prevented the spill,
3226 assume that the target's move expanders will also take care
3227 of the non-legitimate constant. */
3233 /* If X or Y are memory references, verify that their addresses are valid
3235 if (GET_CODE (x) == MEM
3236 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3237 && ! push_operand (x, GET_MODE (x)))
3239 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
3240 x = validize_mem (x);
3242 if (GET_CODE (y) == MEM
3243 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3245 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
3246 y = validize_mem (y);
3248 if (mode == BLKmode)
3251 last_insn = emit_move_insn_1 (x, y);
3253 if (y_cst && GET_CODE (x) == REG
3254 && (set = single_set (last_insn)) != NULL_RTX
3255 && SET_DEST (set) == x
3256 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3257 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
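
/* Illustrative call (under #if 0): load a constant into a fresh
   pseudo; the REG_EQUAL note attached above keeps the constant
   visible to later passes even if it had to be forced into the
   constant pool.  */
#if 0
{
  rtx reg = gen_reg_rtx (SImode);
  emit_move_insn (reg, GEN_INT (42));
}
#endif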
3262 /* Low level part of emit_move_insn.
3263 Called just like emit_move_insn, but assumes X and Y
3264 are basically valid. */
3267 emit_move_insn_1 (rtx x, rtx y)
3269 enum machine_mode mode = GET_MODE (x);
3270 enum machine_mode submode;
3271 enum mode_class class = GET_MODE_CLASS (mode);
3273 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
3276 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
3278 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
3280 /* Expand complex moves by moving real part and imag part, if possible. */
3281 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
3282 && BLKmode != (submode = GET_MODE_INNER (mode))
3283 && (mov_optab->handlers[(int) submode].insn_code
3284 != CODE_FOR_nothing))
3286 /* Don't split destination if it is a stack push. */
3287 int stack = push_operand (x, GET_MODE (x));
3289 #ifdef PUSH_ROUNDING
3290 /* In case we output to the stack, but the size is smaller than the
3291 machine can push exactly, we need to use move instructions. */
3293 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
3294 != GET_MODE_SIZE (submode)))
3297 HOST_WIDE_INT offset1, offset2;
3299 /* Do not use anti_adjust_stack, since we don't want to update
3300 stack_pointer_delta. */
3301 temp = expand_binop (Pmode,
3302 #ifdef STACK_GROWS_DOWNWARD
3310 (GET_MODE_SIZE (GET_MODE (x)))),
3311 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3313 if (temp != stack_pointer_rtx)
3314 emit_move_insn (stack_pointer_rtx, temp);
3316 #ifdef STACK_GROWS_DOWNWARD
3318 offset2 = GET_MODE_SIZE (submode);
3320 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
3321 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
3322 + GET_MODE_SIZE (submode));
3325 emit_move_insn (change_address (x, submode,
3326 gen_rtx_PLUS (Pmode,
3328 GEN_INT (offset1))),
3329 gen_realpart (submode, y));
3330 emit_move_insn (change_address (x, submode,
3331 gen_rtx_PLUS (Pmode,
3333 GEN_INT (offset2))),
3334 gen_imagpart (submode, y));
3338 /* If this is a stack, push the highpart first, so it
3339 will be in the argument order.
3341 In that case, change_address is used only to convert
3342 the mode, not to change the address. */
3345 /* Note that the real part always precedes the imag part in memory
3346 regardless of machine's endianness. */
3347 #ifdef STACK_GROWS_DOWNWARD
3348 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3349 gen_imagpart (submode, y));
3350 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3351 gen_realpart (submode, y));
3353 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3354 gen_realpart (submode, y));
3355 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3356 gen_imagpart (submode, y));
3361 rtx realpart_x, realpart_y;
3362 rtx imagpart_x, imagpart_y;
3364 /* If this is a complex value with each part being smaller than a
3365 word, the usual calling sequence will likely pack the pieces into
3366 a single register. Unfortunately, SUBREG of hard registers only
3367 deals in terms of words, so we have a problem converting input
3368 arguments to the CONCAT of two registers that is used elsewhere
3369 for complex values. If this is before reload, we can copy it into
3370 memory and reload. FIXME, we should see about using extract and
3371 insert on integer registers, but complex short and complex char
3372 variables should be rarely used. */
3373 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
3374 && (reload_in_progress | reload_completed) == 0)
3377 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
3379 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
3381 if (packed_dest_p || packed_src_p)
3383 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
3384 ? MODE_FLOAT : MODE_INT);
3386 enum machine_mode reg_mode
3387 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
3389 if (reg_mode != BLKmode)
3391 rtx mem = assign_stack_temp (reg_mode,
3392 GET_MODE_SIZE (mode), 0);
3393 rtx cmem = adjust_address (mem, mode, 0);
3396 = N_("function using short complex types cannot be inline");
3400 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
3402 emit_move_insn_1 (cmem, y);
3403 return emit_move_insn_1 (sreg, mem);
3407 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
3409 emit_move_insn_1 (mem, sreg);
3410 return emit_move_insn_1 (x, cmem);
3416 realpart_x = gen_realpart (submode, x);
3417 realpart_y = gen_realpart (submode, y);
3418 imagpart_x = gen_imagpart (submode, x);
3419 imagpart_y = gen_imagpart (submode, y);
3421 /* Show the output dies here. This is necessary for SUBREGs
3422 of pseudos since we cannot track their lifetimes correctly;
3423 hard regs shouldn't appear here except as return values.
3424 We never want to emit such a clobber after reload. */
3426 && ! (reload_in_progress || reload_completed)
3427 && (GET_CODE (realpart_x) == SUBREG
3428 || GET_CODE (imagpart_x) == SUBREG))
3429 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3431 emit_move_insn (realpart_x, realpart_y);
3432 emit_move_insn (imagpart_x, imagpart_y);
3435 return get_last_insn ();
3438 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
3439 find a mode to do it in. If we have a movcc, use it. Otherwise,
3440 find the MODE_INT mode of the same width. */
3441 else if (GET_MODE_CLASS (mode) == MODE_CC
3442 && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
3444 enum insn_code insn_code;
3445 enum machine_mode tmode = VOIDmode;
3449 && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
3452 for (tmode = QImode; tmode != VOIDmode;
3453 tmode = GET_MODE_WIDER_MODE (tmode))
3454 if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
3457 if (tmode == VOIDmode)
3460 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
3461 may call change_address which is not appropriate if we were
3462 called when a reload was in progress. We don't have to worry
3463 about changing the address since the size in bytes is supposed to
3464 be the same. Copy the MEM to change the mode and move any
3465 substitutions from the old MEM to the new one. */
3467 if (reload_in_progress)
3469 x = gen_lowpart_common (tmode, x1);
3470 if (x == 0 && GET_CODE (x1) == MEM)
3472 x = adjust_address_nv (x1, tmode, 0);
3473 copy_replacements (x1, x);
3476 y = gen_lowpart_common (tmode, y1);
3477 if (y == 0 && GET_CODE (y1) == MEM)
3479 y = adjust_address_nv (y1, tmode, 0);
3480 copy_replacements (y1, y);
3485 x = gen_lowpart (tmode, x);
3486 y = gen_lowpart (tmode, y);
3489 insn_code = mov_optab->handlers[(int) tmode].insn_code;
3490 return emit_insn (GEN_FCN (insn_code) (x, y));
3493 /* This will handle any multi-word or full-word mode that lacks a move_insn
3494 pattern. However, you will get better code if you define such patterns,
3495 even if they must turn into multiple assembler instructions. */
3496 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
3503 #ifdef PUSH_ROUNDING
3505 /* If X is a push on the stack, do the push now and replace
3506 X with a reference to the stack pointer. */
3507 if (push_operand (x, GET_MODE (x)))
3512 /* Do not use anti_adjust_stack, since we don't want to update
3513 stack_pointer_delta. */
3514 temp = expand_binop (Pmode,
3515 #ifdef STACK_GROWS_DOWNWARD
3523 (GET_MODE_SIZE (GET_MODE (x)))),
3524 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3526 if (temp != stack_pointer_rtx)
3527 emit_move_insn (stack_pointer_rtx, temp);
3529 code = GET_CODE (XEXP (x, 0));
3531 /* Just hope that small offsets off SP are OK. */
3532 if (code == POST_INC)
3533 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3534 GEN_INT (-((HOST_WIDE_INT)
3535 GET_MODE_SIZE (GET_MODE (x)))));
3536 else if (code == POST_DEC)
3537 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3538 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3540 temp = stack_pointer_rtx;
3542 x = change_address (x, VOIDmode, temp);
3546 /* If we are in reload, see if either operand is a MEM whose address
3547 is scheduled for replacement. */
3548 if (reload_in_progress && GET_CODE (x) == MEM
3549 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3550 x = replace_equiv_address_nv (x, inner);
3551 if (reload_in_progress && GET_CODE (y) == MEM
3552 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3553 y = replace_equiv_address_nv (y, inner);
3559 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3562 rtx xpart = operand_subword (x, i, 1, mode);
3563 rtx ypart = operand_subword (y, i, 1, mode);
3565 /* If we can't get a part of Y, put Y into memory if it is a
3566 constant. Otherwise, force it into a register. If we still
3567 can't get a part of Y, abort. */
3568 if (ypart == 0 && CONSTANT_P (y))
3570 y = force_const_mem (mode, y);
3571 ypart = operand_subword (y, i, 1, mode);
3573 else if (ypart == 0)
3574 ypart = operand_subword_force (y, i, mode);
3576 if (xpart == 0 || ypart == 0)
3579 need_clobber |= (GET_CODE (xpart) == SUBREG);
3581 last_insn = emit_move_insn (xpart, ypart);
3587 /* Show the output dies here. This is necessary for SUBREGs
3588 of pseudos since we cannot track their lifetimes correctly;
3589 hard regs shouldn't appear here except as return values.
3590 We never want to emit such a clobber after reload. */
3592 && ! (reload_in_progress || reload_completed)
3593 && need_clobber != 0)
3594 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3604 /* If Y is representable exactly in a narrower mode, and the target can
3605 perform the extension directly from constant or memory, then emit the
3606 move as an extension. */
3609 compress_float_constant (rtx x, rtx y)
3611 enum machine_mode dstmode = GET_MODE (x);
3612 enum machine_mode orig_srcmode = GET_MODE (y);
3613 enum machine_mode srcmode;
3616 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3618 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3619 srcmode != orig_srcmode;
3620 srcmode = GET_MODE_WIDER_MODE (srcmode))
3623 rtx trunc_y, last_insn;
3625 /* Skip if the target can't extend this way. */
3626 ic = can_extend_p (dstmode, srcmode, 0);
3627 if (ic == CODE_FOR_nothing)
3630 /* Skip if the narrowed value isn't exact. */
3631 if (! exact_real_truncate (srcmode, &r))
3634 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3636 if (LEGITIMATE_CONSTANT_P (trunc_y))
3638 /* Skip if the target needs extra instructions to perform
3640 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3643 else if (float_extend_from_mem[dstmode][srcmode])
3644 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3648 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3649 last_insn = get_last_insn ();
3651 if (GET_CODE (x) == REG)
3652 set_unique_reg_note (last_insn, REG_EQUAL, y);
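
/* Illustrative effect (under #if 0, a sketch only): 2.5 is exact in
   SFmode, so a DFmode move of it can be emitted as an SFmode load
   plus extension; a value such as 0.1 fails the exact_real_truncate
   test above and is skipped.  */
#if 0
{
  REAL_VALUE_TYPE r = REAL_VALUE_ATOF ("2.5", DFmode);
  rtx x = gen_reg_rtx (DFmode);
  rtx y = CONST_DOUBLE_FROM_REAL_VALUE (r, DFmode);
  compress_float_constant (x, y);	/* may use extendsfdf2 */
}
#endif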
3660 /* Pushing data onto the stack. */
3662 /* Push a block of length SIZE (perhaps variable)
3663 and return an rtx to address the beginning of the block.
3664 Note that it is not possible for the value returned to be a QUEUED.
3665 The value may be virtual_outgoing_args_rtx.
3667 EXTRA is the number of bytes of padding to push in addition to SIZE.
3668 BELOW nonzero means this padding comes at low addresses;
3669 otherwise, the padding comes at high addresses. */
3672 push_block (rtx size, int extra, int below)
3676 size = convert_modes (Pmode, ptr_mode, size, 1);
3677 if (CONSTANT_P (size))
3678 anti_adjust_stack (plus_constant (size, extra));
3679 else if (GET_CODE (size) == REG && extra == 0)
3680 anti_adjust_stack (size);
3683 temp = copy_to_mode_reg (Pmode, size);
3685 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3686 temp, 0, OPTAB_LIB_WIDEN);
3687 anti_adjust_stack (temp);
3690 #ifndef STACK_GROWS_DOWNWARD
3696 temp = virtual_outgoing_args_rtx;
3697 if (extra != 0 && below)
3698 temp = plus_constant (temp, extra);
3702 if (GET_CODE (size) == CONST_INT)
3703 temp = plus_constant (virtual_outgoing_args_rtx,
3704 -INTVAL (size) - (below ? 0 : extra));
3705 else if (extra != 0 && !below)
3706 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3707 negate_rtx (Pmode, plus_constant (size, extra)));
3709 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3710 negate_rtx (Pmode, size));
3713 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
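
/* Illustrative call (under #if 0): reserve 32 bytes of stack plus 4
   bytes of padding below it, then address the block.  */
#if 0
{
  rtx addr = push_block (GEN_INT (32), 4, 1);
  rtx blk = gen_rtx_MEM (BLKmode, addr);
}
#endif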
3716 #ifdef PUSH_ROUNDING
3718 /* Emit single push insn. */
3721 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3724 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3726 enum insn_code icode;
3727 insn_operand_predicate_fn pred;
3729 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3730 /* If there is a push pattern, use it. Otherwise try the old way of
3731 throwing a MEM representing the push operation to the move expander. */
3732 icode = push_optab->handlers[(int) mode].insn_code;
3733 if (icode != CODE_FOR_nothing)
3735 if (((pred = insn_data[(int) icode].operand[0].predicate)
3736 && !((*pred) (x, mode))))
3737 x = force_reg (mode, x);
3738 emit_insn (GEN_FCN (icode) (x));
3741 if (GET_MODE_SIZE (mode) == rounded_size)
3742 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3743 /* If we are to pad downward, adjust the stack pointer first and
3744 then store X into the stack location using an offset. This is
3745 because emit_move_insn does not know how to pad; it does not have
3746 such knowledge. */
3747 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3749 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3750 HOST_WIDE_INT offset;
3752 emit_move_insn (stack_pointer_rtx,
3753 expand_binop (Pmode,
3754 #ifdef STACK_GROWS_DOWNWARD
3760 GEN_INT (rounded_size),
3761 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3763 offset = (HOST_WIDE_INT) padding_size;
3764 #ifdef STACK_GROWS_DOWNWARD
3765 if (STACK_PUSH_CODE == POST_DEC)
3766 /* We have already decremented the stack pointer, so get the
3767 previous value. */
3768 offset += (HOST_WIDE_INT) rounded_size;
3770 if (STACK_PUSH_CODE == POST_INC)
3771 /* We have already incremented the stack pointer, so get the
3772 previous value. */
3773 offset -= (HOST_WIDE_INT) rounded_size;
3775 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3779 #ifdef STACK_GROWS_DOWNWARD
3780 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3781 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3782 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3784 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3785 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3786 GEN_INT (rounded_size));
3788 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3791 dest = gen_rtx_MEM (mode, dest_addr);
3795 set_mem_attributes (dest, type, 1);
3797 if (flag_optimize_sibling_calls)
3798 /* Function incoming arguments may overlap with sibling call
3799 outgoing arguments and we cannot allow reordering of reads
3800 from function arguments with stores to outgoing arguments
3801 of sibling calls. */
3802 set_mem_alias_set (dest, 0);
3804 emit_move_insn (dest, x);
3808 /* Generate code to push X onto the stack, assuming it has mode MODE and
3809 type TYPE.
3810 MODE is redundant except when X is a CONST_INT (since they don't
3811 carry mode info).
3812 SIZE is an rtx for the size of data to be copied (in bytes),
3813 needed only if X is BLKmode.
3815 ALIGN (in bits) is maximum alignment we can assume.
3817 If PARTIAL and REG are both nonzero, then copy that many of the first
3818 words of X into registers starting with REG, and push the rest of X.
3819 The amount of space pushed is decreased by PARTIAL words,
3820 rounded *down* to a multiple of PARM_BOUNDARY.
3821 REG must be a hard register in this case.
3822 If REG is zero but PARTIAL is not, take all other actions for an
3823 argument partially in registers, but do not actually load any
3824 registers.
3826 EXTRA is the amount in bytes of extra space to leave next to this arg.
3827 This is ignored if an argument block has already been allocated.
3829 On a machine that lacks real push insns, ARGS_ADDR is the address of
3830 the bottom of the argument block for this call. We use indexing off there
3831 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3832 argument block has not been preallocated.
3834 ARGS_SO_FAR is the size of args previously pushed for this call.
3836 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3837 for arguments passed in registers. If nonzero, it will be the number
3838 of bytes required. */
3841 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3842 unsigned int align, int partial, rtx reg, int extra,
3843 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3847 enum direction stack_direction
3848 #ifdef STACK_GROWS_DOWNWARD
3854 /* Decide where to pad the argument: `downward' for below,
3855 `upward' for above, or `none' for don't pad it.
3856 Default is below for small data on big-endian machines; else above. */
3857 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3859 /* Invert direction if stack is post-decrement.
3861 if (STACK_PUSH_CODE == POST_DEC)
3862 if (where_pad != none)
3863 where_pad = (where_pad == downward ? upward : downward);
3865 xinner = x = protect_from_queue (x, 0);
3867 if (mode == BLKmode)
3869 /* Copy a block into the stack, entirely or partially. */
3872 int used = partial * UNITS_PER_WORD;
3873 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3881 /* USED is now the # of bytes we need not copy to the stack
3882 because registers will take care of them. */
3885 xinner = adjust_address (xinner, BLKmode, used);
3887 /* If the partial register-part of the arg counts in its stack size,
3888 skip the part of stack space corresponding to the registers.
3889 Otherwise, start copying to the beginning of the stack space,
3890 by setting SKIP to 0. */
3891 skip = (reg_parm_stack_space == 0) ? 0 : used;
3893 #ifdef PUSH_ROUNDING
3894 /* Do it with several push insns if that doesn't take lots of insns
3895 and if there is no difficulty with push insns that skip bytes
3896 on the stack for alignment purposes. */
3899 && GET_CODE (size) == CONST_INT
3901 && MEM_ALIGN (xinner) >= align
3902 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3903 /* Here we avoid the case of a structure whose weak alignment
3904 forces many pushes of a small amount of data,
3905 and such small pushes do rounding that causes trouble. */
3906 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3907 || align >= BIGGEST_ALIGNMENT
3908 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3909 == (align / BITS_PER_UNIT)))
3910 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3912 /* Push padding now if padding above and stack grows down,
3913 or if padding below and stack grows up.
3914 But if space already allocated, this has already been done. */
3915 if (extra && args_addr == 0
3916 && where_pad != none && where_pad != stack_direction)
3917 anti_adjust_stack (GEN_INT (extra));
3919 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3922 #endif /* PUSH_ROUNDING */
3926 /* Otherwise make space on the stack and copy the data
3927 to the address of that space. */
3929 /* Deduct words put into registers from the size we must copy. */
3932 if (GET_CODE (size) == CONST_INT)
3933 size = GEN_INT (INTVAL (size) - used);
3935 size = expand_binop (GET_MODE (size), sub_optab, size,
3936 GEN_INT (used), NULL_RTX, 0,
3940 /* Get the address of the stack space.
3941 In this case, we do not deal with EXTRA separately.
3942 A single stack adjust will do. */
3945 temp = push_block (size, extra, where_pad == downward);
3948 else if (GET_CODE (args_so_far) == CONST_INT)
3949 temp = memory_address (BLKmode,
3950 plus_constant (args_addr,
3951 skip + INTVAL (args_so_far)));
3953 temp = memory_address (BLKmode,
3954 plus_constant (gen_rtx_PLUS (Pmode,
3959 if (!ACCUMULATE_OUTGOING_ARGS)
3961 /* If the source is referenced relative to the stack pointer,
3962 copy it to another register to stabilize it. We do not need
3963 to do this if we know that we won't be changing sp. */
3965 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3966 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3967 temp = copy_to_reg (temp);
3970 target = gen_rtx_MEM (BLKmode, temp);
3974 set_mem_attributes (target, type, 1);
3975 /* Function incoming arguments may overlap with sibling call
3976 outgoing arguments and we cannot allow reordering of reads
3977 from function arguments with stores to outgoing arguments
3978 of sibling calls. */
3979 set_mem_alias_set (target, 0);
3982 /* ALIGN may well be better aligned than TYPE, e.g. due to
3983 PARM_BOUNDARY. Assume the caller isn't lying. */
3984 set_mem_align (target, align);
3986 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3989 else if (partial > 0)
3991 /* Scalar partly in registers. */
3993 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3996 /* # words of start of argument
3997 that we must make space for but need not store. */
3998 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3999 int args_offset = INTVAL (args_so_far);
4002 /* Push padding now if padding above and stack grows down,
4003 or if padding below and stack grows up.
4004 But if space already allocated, this has already been done. */
4005 if (extra && args_addr == 0
4006 && where_pad != none && where_pad != stack_direction)
4007 anti_adjust_stack (GEN_INT (extra));
4009 /* If we make space by pushing it, we might as well push
4010 the real data. Otherwise, we can leave OFFSET nonzero
4011 and leave the space uninitialized. */
4015 /* Now NOT_STACK gets the number of words that we don't need to
4016 allocate on the stack. */
4017 not_stack = partial - offset;
4019 /* If the partial register-part of the arg counts in its stack size,
4020 skip the part of stack space corresponding to the registers.
4021 Otherwise, start copying to the beginning of the stack space,
4022 by setting SKIP to 0. */
4023 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
4025 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
4026 x = validize_mem (force_const_mem (mode, x));
4028 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
4029 SUBREGs of such registers are not allowed. */
4030 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
4031 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
4032 x = copy_to_reg (x);
4034 /* Loop over all the words allocated on the stack for this arg. */
4035 /* We can do it by words, because any scalar bigger than a word
4036 has a size a multiple of a word. */
4037 #ifndef PUSH_ARGS_REVERSED
4038 for (i = not_stack; i < size; i++)
4040 for (i = size - 1; i >= not_stack; i--)
4042 if (i >= not_stack + offset)
4043 emit_push_insn (operand_subword_force (x, i, mode),
4044 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
4046 GEN_INT (args_offset + ((i - not_stack + skip)
4048 reg_parm_stack_space, alignment_pad);
4055 /* Push padding now if padding above and stack grows down,
4056 or if padding below and stack grows up.
4057 But if space already allocated, this has already been done. */
4058 if (extra && args_addr == 0
4059 && where_pad != none && where_pad != stack_direction)
4060 anti_adjust_stack (GEN_INT (extra));
4062 #ifdef PUSH_ROUNDING
4063 if (args_addr == 0 && PUSH_ARGS)
4064 emit_single_push_insn (mode, x, type);
4068 if (GET_CODE (args_so_far) == CONST_INT)
4070 = memory_address (mode,
4071 plus_constant (args_addr,
4072 INTVAL (args_so_far)));
4074 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
4076 dest = gen_rtx_MEM (mode, addr);
4079 set_mem_attributes (dest, type, 1);
4080 /* Function incoming arguments may overlap with sibling call
4081 outgoing arguments and we cannot allow reordering of reads
4082 from function arguments with stores to outgoing arguments
4083 of sibling calls. */
4084 set_mem_alias_set (dest, 0);
4087 emit_move_insn (dest, x);
4091 /* If part should go in registers, copy that part
4092 into the appropriate registers. Do this now, at the end,
4093 since mem-to-mem copies above may do function calls. */
4094 if (partial > 0 && reg != 0)
4096 /* Handle calls that pass values in multiple non-contiguous locations.
4097 The Irix 6 ABI has examples of this. */
4098 if (GET_CODE (reg) == PARALLEL)
4099 emit_group_load (reg, x, type, -1);
4101 move_block_to_reg (REGNO (reg), x, partial, mode);
4104 if (extra && args_addr == 0 && where_pad == stack_direction)
4105 anti_adjust_stack (GEN_INT (extra));
4107 if (alignment_pad && args_addr == 0)
4108 anti_adjust_stack (alignment_pad);
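
/* Illustrative call (under #if 0, arguments invented): push a
   word-mode scalar with no partial-register part and no preallocated
   argument block.  */
#if 0
emit_push_insn (x, word_mode, NULL_TREE, NULL_RTX, BITS_PER_WORD,
		0, NULL_RTX, 0, NULL_RTX, const0_rtx, 0, NULL_RTX);
#endif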
4111 /* Return X if X can be used as a subtarget in a sequence of arithmetic
4112 operations. */
4115 get_subtarget (rtx x)
4118 /* Only registers can be subtargets. */
4119 || GET_CODE (x) != REG
4120 /* If the register is readonly, it can't be set more than once. */
4121 || RTX_UNCHANGING_P (x)
4122 /* Don't use hard regs to avoid extending their life. */
4123 || REGNO (x) < FIRST_PSEUDO_REGISTER
4124 /* Avoid subtargets inside loops,
4125 since they hide some invariant expressions. */
4126 || preserve_subexpressions_p ())
4130 /* Expand an assignment that stores the value of FROM into TO.
4131 If WANT_VALUE is nonzero, return an rtx for the value of TO.
4132 (This may contain a QUEUED rtx;
4133 if the value is constant, this rtx is a constant.)
4134 Otherwise, the returned value is NULL_RTX. */
4137 expand_assignment (tree to, tree from, int want_value)
4142 /* Don't crash if the lhs of the assignment was erroneous. */
4144 if (TREE_CODE (to) == ERROR_MARK)
4146 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
4147 return want_value ? result : NULL_RTX;
4150 /* Assignment of a structure component needs special treatment
4151 if the structure component's rtx is not simply a MEM.
4152 Assignment of an array element at a constant index, and assignment of
4153 an array element in an unaligned packed structure field, has the same
4154 problem. */
4156 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
4157 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
4158 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4160 enum machine_mode mode1;
4161 HOST_WIDE_INT bitsize, bitpos;
4169 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4170 &unsignedp, &volatilep);
4172 /* If we are going to use store_bit_field and extract_bit_field,
4173 make sure to_rtx will be safe for multiple use. */
4175 if (mode1 == VOIDmode && want_value)
4176 tem = stabilize_reference (tem);
4178 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
4182 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4184 if (GET_CODE (to_rtx) != MEM)
4187 #ifdef POINTERS_EXTEND_UNSIGNED
4188 if (GET_MODE (offset_rtx) != Pmode)
4189 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4191 if (GET_MODE (offset_rtx) != ptr_mode)
4192 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4195 /* A constant address in TO_RTX can have VOIDmode, we must not try
4196 to call force_reg for that case. Avoid that case. */
4197 if (GET_CODE (to_rtx) == MEM
4198 && GET_MODE (to_rtx) == BLKmode
4199 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4201 && (bitpos % bitsize) == 0
4202 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4203 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4205 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4209 to_rtx = offset_address (to_rtx, offset_rtx,
4210 highest_pow2_factor_for_type (TREE_TYPE (to),
4214 if (GET_CODE (to_rtx) == MEM)
4216 /* If the field is at offset zero, we could have been given the
4217 DECL_RTX of the parent struct. Don't munge it. */
4218 to_rtx = shallow_copy_rtx (to_rtx);
4220 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4223 /* Deal with volatile and readonly fields. The former is only done
4224 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4225 if (volatilep && GET_CODE (to_rtx) == MEM)
4227 if (to_rtx == orig_to_rtx)
4228 to_rtx = copy_rtx (to_rtx);
4229 MEM_VOLATILE_P (to_rtx) = 1;
4232 if (TREE_CODE (to) == COMPONENT_REF
4233 && TREE_READONLY (TREE_OPERAND (to, 1)))
4235 if (to_rtx == orig_to_rtx)
4236 to_rtx = copy_rtx (to_rtx);
4237 RTX_UNCHANGING_P (to_rtx) = 1;
4240 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
4242 if (to_rtx == orig_to_rtx)
4243 to_rtx = copy_rtx (to_rtx);
4244 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4247 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4249 /* Spurious cast for HPUX compiler. */
4250 ? ((enum machine_mode)
4251 TYPE_MODE (TREE_TYPE (to)))
4253 unsignedp, TREE_TYPE (tem), get_alias_set (to));
4255 preserve_temp_slots (result);
4259 /* If the value is meaningful, convert RESULT to the proper mode.
4260 Otherwise, return nothing. */
4261 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
4262 TYPE_MODE (TREE_TYPE (from)),
4264 TREE_UNSIGNED (TREE_TYPE (to)))
4268 /* If the rhs is a function call and its value is not an aggregate,
4269 call the function before we start to compute the lhs.
4270 This is needed for correct code for cases such as
4271 val = setjmp (buf) on machines where reference to val
4272 requires loading up part of an address in a separate insn.
4274 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4275 since it might be a promoted variable where the zero- or sign- extension
4276 needs to be done. Handling this in the normal way is safe because no
4277 computation is done before the call. */
4278 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
4279 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4280 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4281 && GET_CODE (DECL_RTL (to)) == REG))
4286 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
4288 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4290 /* Handle calls that return values in multiple non-contiguous locations.
4291 The Irix 6 ABI has examples of this. */
4292 if (GET_CODE (to_rtx) == PARALLEL)
4293 emit_group_load (to_rtx, value, TREE_TYPE (from),
4294 int_size_in_bytes (TREE_TYPE (from)));
4295 else if (GET_MODE (to_rtx) == BLKmode)
4296 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4299 #ifdef POINTERS_EXTEND_UNSIGNED
4300 if (POINTER_TYPE_P (TREE_TYPE (to))
4301 && GET_MODE (to_rtx) != GET_MODE (value))
4302 value = convert_memory_address (GET_MODE (to_rtx), value);
4304 emit_move_insn (to_rtx, value);
4306 preserve_temp_slots (to_rtx);
4309 return want_value ? to_rtx : NULL_RTX;
4312 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4313 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4316 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4318 /* Don't move directly into a return register. */
4319 if (TREE_CODE (to) == RESULT_DECL
4320 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
4325 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
4327 if (GET_CODE (to_rtx) == PARALLEL)
4328 emit_group_load (to_rtx, temp, TREE_TYPE (from),
4329 int_size_in_bytes (TREE_TYPE (from)));
4331 emit_move_insn (to_rtx, temp);
4333 preserve_temp_slots (to_rtx);
4336 return want_value ? to_rtx : NULL_RTX;
4339 /* In case we are returning the contents of an object which overlaps
4340 the place the value is being stored, use a safe function when copying
4341 a value through a pointer into a structure value return block. */
4342 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4343 && current_function_returns_struct
4344 && !current_function_returns_pcc_struct)
4349 size = expr_size (from);
4350 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
4352 if (TARGET_MEM_FUNCTIONS)
4353 emit_library_call (memmove_libfunc, LCT_NORMAL,
4354 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4355 XEXP (from_rtx, 0), Pmode,
4356 convert_to_mode (TYPE_MODE (sizetype),
4357 size, TREE_UNSIGNED (sizetype)),
4358 TYPE_MODE (sizetype));
4360 emit_library_call (bcopy_libfunc, LCT_NORMAL,
4361 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
4362 XEXP (to_rtx, 0), Pmode,
4363 convert_to_mode (TYPE_MODE (integer_type_node),
4365 TREE_UNSIGNED (integer_type_node)),
4366 TYPE_MODE (integer_type_node));
4368 preserve_temp_slots (to_rtx);
4371 return want_value ? to_rtx : NULL_RTX;
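/* Editorial sketch: this path covers code such as the hypothetical
   struct S f (struct S *p) { return *p; }
   where *P may overlap the structure-return block; an overlap-safe
   memmove call (or bcopy, whose source/destination argument order
   is reversed) is emitted instead of a plain block move.  */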
4374 /* Compute FROM and store the value in the rtx we got. */
4377 result = store_expr (from, to_rtx, want_value);
4378 preserve_temp_slots (result);
4381 return want_value ? result : NULL_RTX;
4384 /* Generate code for computing expression EXP,
4385 and storing the value into TARGET.
4386 TARGET may contain a QUEUED rtx.
4388 If WANT_VALUE & 1 is nonzero, return a copy of the value
4389 not in TARGET, so that we can be sure to use the proper
4390 value in a containing expression even if TARGET has something
4391 else stored in it. If possible, we copy the value through a pseudo
4392 and return that pseudo. Or, if the value is constant, we try to
4393 return the constant. In some cases, we return a pseudo
4394 copied *from* TARGET.
4396 If the mode is BLKmode then we may return TARGET itself.
4397 It turns out that in BLKmode it doesn't cause a problem,
4398 because C has no operators that could combine two different
4399 assignments into the same BLKmode object with different values
4400 with no sequence point. Will other languages need this to
4401 be more careful?
4403 If WANT_VALUE & 1 is 0, we return NULL, to make sure
4404 to catch quickly any cases where the caller uses the value
4405 and fails to set WANT_VALUE.
4407 If WANT_VALUE & 2 is set, this is a store into a call param on the
4408 stack, and block moves may need to be treated specially. */
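/* Editorial sketch of the WANT_VALUE encoding documented above,
   assuming the bit meanings given there:
   store_expr (exp, target, 0)  -- store only; result is NULL_RTX
   store_expr (exp, target, 1)  -- store and also return the value
   store_expr (exp, target, 2)  -- store into a stack call parameter
   store_expr (exp, target, 3)  -- both of the above.  */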
4411 store_expr (tree exp, rtx target, int want_value)
4414 int dont_return_target = 0;
4415 int dont_store_target = 0;
4417 if (VOID_TYPE_P (TREE_TYPE (exp)))
4419 /* C++ can generate ?: expressions with a throw expression in one
4420 branch and an rvalue in the other. Here, we resolve attempts to
4421 store the throw expression's nonexistent result. */
4424 expand_expr (exp, const0_rtx, VOIDmode, 0);
4427 if (TREE_CODE (exp) == COMPOUND_EXPR)
4429 /* Perform first part of compound expression, then assign from second
4430 part. */
4431 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4432 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4434 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4436 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4438 /* For a conditional expression, get safe form of the target. Then
4439 test the condition, doing the appropriate assignment on either
4440 side. This avoids the creation of unnecessary temporaries.
4441 For non-BLKmode, it is more efficient not to do this. */
4443 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4446 target = protect_from_queue (target, 1);
4448 do_pending_stack_adjust ();
4450 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4451 start_cleanup_deferral ();
4452 store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
4453 end_cleanup_deferral ();
4455 emit_jump_insn (gen_jump (lab2));
4458 start_cleanup_deferral ();
4459 store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
4460 end_cleanup_deferral ();
4465 return want_value & 1 ? target : NULL_RTX;
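/* Editorial sketch, not in the original source: for a BLKmode
   assignment `x = cond ? a : b', the code emitted above is roughly
       if (!cond) goto lab1;
       x = a;
       goto lab2;
     lab1:
       x = b;
     lab2:
   so each arm stores directly into X and no BLKmode temporary is
   created.  */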
4467 else if (queued_subexp_p (target))
4468 /* If target contains a postincrement, let's not risk
4469 using it as the place to generate the rhs. */
4471 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4473 /* Expand EXP into a new pseudo. */
4474 temp = gen_reg_rtx (GET_MODE (target));
4475 temp = expand_expr (exp, temp, GET_MODE (target),
4477 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4480 temp = expand_expr (exp, NULL_RTX, GET_MODE (target),
4482 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4484 /* If target is volatile, ANSI requires accessing the value
4485 *from* the target, if it is accessed. So make that happen.
4486 In no case return the target itself. */
4487 if (! MEM_VOLATILE_P (target) && (want_value & 1) != 0)
4488 dont_return_target = 1;
4490 else if ((want_value & 1) != 0
4491 && GET_CODE (target) == MEM
4492 && ! MEM_VOLATILE_P (target)
4493 && GET_MODE (target) != BLKmode)
4494 /* If target is in memory and caller wants value in a register instead,
4495 arrange that. Pass TARGET as target for expand_expr so that,
4496 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4497 We know expand_expr will not use the target in that case.
4498 Don't do this if TARGET is volatile because we are supposed
4499 to write it and then read it. */
4501 temp = expand_expr (exp, target, GET_MODE (target),
4502 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4503 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4505 /* If TEMP is already in the desired TARGET, only copy it from
4506 memory and don't store it there again. */
4508 || (rtx_equal_p (temp, target)
4509 && ! side_effects_p (temp) && ! side_effects_p (target)))
4510 dont_store_target = 1;
4511 temp = copy_to_reg (temp);
4513 dont_return_target = 1;
4515 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4516 /* If this is a scalar in a register that is stored in a wider mode
4517 than the declared mode, compute the result into its declared mode
4518 and then convert to the wider mode. Our value is the computed
4519 expression. */
4521 rtx inner_target = 0;
4523 /* If we don't want a value, we can do the conversion inside EXP,
4524 which will often result in some optimizations. Do the conversion
4525 in two steps: first change the signedness, if needed, then
4526 do the extension. But don't do this if the type of EXP is a subtype
4527 of something else since then the conversion might involve
4528 more than just converting modes. */
4529 if ((want_value & 1) == 0
4530 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4531 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4533 if (TREE_UNSIGNED (TREE_TYPE (exp))
4534 != SUBREG_PROMOTED_UNSIGNED_P (target))
4536 ((*lang_hooks.types.signed_or_unsigned_type)
4537 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4539 exp = convert ((*lang_hooks.types.type_for_mode)
4540 (GET_MODE (SUBREG_REG (target)),
4541 SUBREG_PROMOTED_UNSIGNED_P (target)),
4544 inner_target = SUBREG_REG (target);
4547 temp = expand_expr (exp, inner_target, VOIDmode,
4548 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4550 /* If TEMP is a MEM and we want a result value, make the access
4551 now so it gets done only once. Strictly speaking, this is
4552 only necessary if the MEM is volatile, or if the address
4553 overlaps TARGET. But not performing the load twice also
4554 reduces the amount of rtl we generate and then have to CSE. */
4555 if (GET_CODE (temp) == MEM && (want_value & 1) != 0)
4556 temp = copy_to_reg (temp);
4558 /* If TEMP is a VOIDmode constant, use convert_modes to make
4559 sure that we properly convert it. */
4560 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4562 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4563 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4564 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4565 GET_MODE (target), temp,
4566 SUBREG_PROMOTED_UNSIGNED_P (target));
4569 convert_move (SUBREG_REG (target), temp,
4570 SUBREG_PROMOTED_UNSIGNED_P (target));
4572 /* If we promoted a constant, change the mode back down to match
4573 target. Otherwise, the caller might get confused by a result whose
4574 mode is larger than expected. */
4576 if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
4578 if (GET_MODE (temp) != VOIDmode)
4580 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4581 SUBREG_PROMOTED_VAR_P (temp) = 1;
4582 SUBREG_PROMOTED_UNSIGNED_SET (temp,
4583 SUBREG_PROMOTED_UNSIGNED_P (target));
4586 temp = convert_modes (GET_MODE (target),
4587 GET_MODE (SUBREG_REG (target)),
4588 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4591 return want_value & 1 ? temp : NULL_RTX;
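/* Editorial sketch of the promoted-variable case above: on a
   PROMOTE_MODE target a `short' variable may live in an SImode
   pseudo, with DECL_RTL (subreg:HI (reg:SI N) 0) and
   SUBREG_PROMOTED_VAR_P set.  The rhs is computed in the narrow
   mode and convert_move then widens it into the SImode register,
   zero- or sign-extending according to
   SUBREG_PROMOTED_UNSIGNED_P.  */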
4595 temp = expand_expr (exp, target, GET_MODE (target),
4596 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4597 /* Return TARGET if it's a specified hardware register.
4598 If TARGET is a volatile mem ref, either return TARGET
4599 or return a reg copied *from* TARGET; ANSI requires this.
4601 Otherwise, if TEMP is not TARGET, return TEMP
4602 if it is constant (for efficiency),
4603 or if we really want the correct value. */
4604 if (!(target && GET_CODE (target) == REG
4605 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4606 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4607 && ! rtx_equal_p (temp, target)
4608 && (CONSTANT_P (temp) || (want_value & 1) != 0))
4609 dont_return_target = 1;
4612 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4613 the same as that of TARGET, adjust the constant. This is needed, for
4614 example, in case it is a CONST_DOUBLE and we want only a word-sized
4615 value. */
4616 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4617 && TREE_CODE (exp) != ERROR_MARK
4618 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4619 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4620 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4622 /* If value was not generated in the target, store it there.
4623 Convert the value to TARGET's type first if necessary.
4624 If TEMP and TARGET compare equal according to rtx_equal_p, but
4625 one or both of them are volatile memory refs, we have to distinguish
4626 two cases:
4627 - expand_expr has used TARGET. In this case, we must not generate
4628 another copy. This can be detected by TARGET being equal according
4629 to ==.
4630 - expand_expr has not used TARGET - that means that the source just
4631 happens to have the same RTX form. Since temp will have been created
4632 by expand_expr, it will compare unequal according to ==.
4633 We must generate a copy in this case, to reach the correct number
4634 of volatile memory references. */
4636 if ((! rtx_equal_p (temp, target)
4637 || (temp != target && (side_effects_p (temp)
4638 || side_effects_p (target))))
4639 && TREE_CODE (exp) != ERROR_MARK
4640 && ! dont_store_target
4641 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4642 but TARGET is not a valid memory reference, TEMP will differ
4643 from TARGET although it is really the same location. */
4644 && (TREE_CODE_CLASS (TREE_CODE (exp)) != 'd'
4645 || target != DECL_RTL_IF_SET (exp))
4646 /* If there's nothing to copy, don't bother. Don't call expr_size
4647 unless necessary, because some front ends' (C++) expr_size hook
4648 aborts on objects that are not supposed to be bit-copied or
4649 bit-initialized. */
4650 && expr_size (exp) != const0_rtx)
4652 target = protect_from_queue (target, 1);
4653 if (GET_MODE (temp) != GET_MODE (target)
4654 && GET_MODE (temp) != VOIDmode)
4656 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4657 if (dont_return_target)
4659 /* In this case, we will return TEMP,
4660 so make sure it has the proper mode.
4661 But don't forget to store the value into TARGET. */
4662 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4663 emit_move_insn (target, temp);
4666 convert_move (target, temp, unsignedp);
4669 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4671 /* Handle copying a string constant into an array. The string
4672 constant may be shorter than the array. So copy just the string's
4673 actual length, and clear the rest. First get the size of the data
4674 type of the string, which is actually the size of the target. */
4675 rtx size = expr_size (exp);
4677 if (GET_CODE (size) == CONST_INT
4678 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4679 emit_block_move (target, temp, size,
4681 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4684 /* Compute the size of the data to copy from the string. */
4686 = size_binop (MIN_EXPR,
4687 make_tree (sizetype, size),
4688 size_int (TREE_STRING_LENGTH (exp)));
4690 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4692 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4695 /* Copy that much. */
4696 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4697 TREE_UNSIGNED (sizetype));
4698 emit_block_move (target, temp, copy_size_rtx,
4700 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4702 /* Figure out how much is left in TARGET that we have to clear.
4703 Do all calculations in ptr_mode. */
4704 if (GET_CODE (copy_size_rtx) == CONST_INT)
4706 size = plus_constant (size, -INTVAL (copy_size_rtx));
4707 target = adjust_address (target, BLKmode,
4708 INTVAL (copy_size_rtx));
4712 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4713 copy_size_rtx, NULL_RTX, 0,
4716 #ifdef POINTERS_EXTEND_UNSIGNED
4717 if (GET_MODE (copy_size_rtx) != Pmode)
4718 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4719 TREE_UNSIGNED (sizetype));
4722 target = offset_address (target, copy_size_rtx,
4723 highest_pow2_factor (copy_size));
4724 label = gen_label_rtx ();
4725 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4726 GET_MODE (size), 0, label);
4729 if (size != const0_rtx)
4730 clear_storage (target, size);
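/* Editorial worked example: for the hypothetical
   `char buf[8] = "abc"' the string's TREE_STRING_LENGTH is 4 (it
   counts the trailing NUL) and expr_size is 8, so the path above
   copies MIN (8, 4) = 4 bytes and then clears the remaining
   8 - 4 = 4 bytes.  */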
4736 /* Handle calls that return values in multiple non-contiguous locations.
4737 The Irix 6 ABI has examples of this. */
4738 else if (GET_CODE (target) == PARALLEL)
4739 emit_group_load (target, temp, TREE_TYPE (exp),
4740 int_size_in_bytes (TREE_TYPE (exp)));
4741 else if (GET_MODE (temp) == BLKmode)
4742 emit_block_move (target, temp, expr_size (exp),
4744 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4746 emit_move_insn (target, temp);
4749 /* If we don't want a value, return NULL_RTX. */
4750 if ((want_value & 1) == 0)
4753 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4754 ??? The latter test doesn't seem to make sense. */
4755 else if (dont_return_target && GET_CODE (temp) != MEM)
4758 /* Return TARGET itself if it is a hard register. */
4759 else if ((want_value & 1) != 0
4760 && GET_MODE (target) != BLKmode
4761 && ! (GET_CODE (target) == REG
4762 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4763 return copy_to_reg (target);
4769 /* Return 1 if EXP just contains zeros. */
4772 is_zeros_p (tree exp)
4776 switch (TREE_CODE (exp))
4780 case NON_LVALUE_EXPR:
4781 case VIEW_CONVERT_EXPR:
4782 return is_zeros_p (TREE_OPERAND (exp, 0));
4785 return integer_zerop (exp);
4789 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4792 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4795 for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4796 elt = TREE_CHAIN (elt))
4797 if (!is_zeros_p (TREE_VALUE (elt)))
4803 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4804 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4805 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4806 if (! is_zeros_p (TREE_VALUE (elt)))
4816 /* Return 1 if EXP contains mostly (3/4) zeros. */
4819 mostly_zeros_p (tree exp)
4821 if (TREE_CODE (exp) == CONSTRUCTOR)
4823 int elts = 0, zeros = 0;
4824 tree elt = CONSTRUCTOR_ELTS (exp);
4825 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4827 /* If there are no ranges of true bits, it is all zero. */
4828 return elt == NULL_TREE;
4830 for (; elt; elt = TREE_CHAIN (elt))
4832 /* We do not handle the case where the index is a RANGE_EXPR,
4833 so the statistic will be somewhat inaccurate.
4834 We do make a more accurate count in store_constructor itself,
4835 and since this function is only used for nested array elements,
4836 this should be close enough. */
4837 if (mostly_zeros_p (TREE_VALUE (elt)))
4842 return 4 * zeros >= 3 * elts;
4845 return is_zeros_p (exp);
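/* Editorial worked example of the 3/4 test above: a constructor
   with elements {0, 0, 0, 7} gives zeros = 3, elts = 4, and
   4 * 3 = 12 >= 3 * 4 = 12, so it is considered mostly zeros;
   {0, 0, 7, 7} gives 4 * 2 = 8 < 12, so it is not.  */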
4848 /* Helper function for store_constructor.
4849 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4850 TYPE is the type of the CONSTRUCTOR, not the element type.
4851 CLEARED is as for store_constructor.
4852 ALIAS_SET is the alias set to use for any stores.
4854 This provides a recursive shortcut back to store_constructor when it isn't
4855 necessary to go through store_field. This is so that we can pass through
4856 the cleared field to let store_constructor know that we may not have to
4857 clear a substructure if the outer structure has already been cleared. */
4860 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4861 HOST_WIDE_INT bitpos, enum machine_mode mode,
4862 tree exp, tree type, int cleared, int alias_set)
4864 if (TREE_CODE (exp) == CONSTRUCTOR
4865 && bitpos % BITS_PER_UNIT == 0
4866 /* If we have a nonzero bitpos for a register target, then we just
4867 let store_field do the bitfield handling. This is unlikely to
4868 generate unnecessary clear instructions anyway. */
4869 && (bitpos == 0 || GET_CODE (target) == MEM))
4871 if (GET_CODE (target) == MEM)
4873 = adjust_address (target,
4874 GET_MODE (target) == BLKmode
4876 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4877 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4880 /* Update the alias set, if required. */
4881 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4882 && MEM_ALIAS_SET (target) != 0)
4884 target = copy_rtx (target);
4885 set_mem_alias_set (target, alias_set);
4888 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4891 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4895 /* Store the value of constructor EXP into the rtx TARGET.
4896 TARGET is either a REG or a MEM; we know it cannot conflict, since
4897 safe_from_p has been called.
4898 CLEARED is true if TARGET is known to have been zero'd.
4899 SIZE is the number of bytes of TARGET we are allowed to modify: this
4900 may not be the same as the size of EXP if we are assigning to a field
4901 which has been packed to exclude padding bits. */
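/* Editorial example: a hypothetical
   `struct { int a, b, c; } s = { 1 }' reaches the record case below
   with a constructor listing fewer elements than the type has
   fields, so TARGET is cleared as a whole first and only the field
   `a' is then stored explicitly.  */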
4904 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4906 tree type = TREE_TYPE (exp);
4907 #ifdef WORD_REGISTER_OPERATIONS
4908 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4911 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4912 || TREE_CODE (type) == QUAL_UNION_TYPE)
4916 /* If size is zero or the target is already cleared, do nothing. */
4917 if (size == 0 || cleared)
4919 /* We either clear the aggregate or indicate the value is dead. */
4920 else if ((TREE_CODE (type) == UNION_TYPE
4921 || TREE_CODE (type) == QUAL_UNION_TYPE)
4922 && ! CONSTRUCTOR_ELTS (exp))
4923 /* If the constructor is empty, clear the union. */
4925 clear_storage (target, expr_size (exp));
4929 /* If we are building a static constructor into a register,
4930 set the initial value as zero so we can fold the value into
4931 a constant. But if more than one register is involved,
4932 this probably loses. */
4933 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4934 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4936 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4940 /* If the constructor has fewer fields than the structure
4941 or if we are initializing the structure to mostly zeros,
4942 clear the whole structure first. Don't do this if TARGET is a
4943 register whose mode size isn't equal to SIZE since clear_storage
4944 can't handle this case. */
4945 else if (((list_length (CONSTRUCTOR_ELTS (exp)) != fields_length (type))
4946 || mostly_zeros_p (exp))
4947 && (GET_CODE (target) != REG
4948 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4951 rtx xtarget = target;
4953 if (readonly_fields_p (type))
4955 xtarget = copy_rtx (xtarget);
4956 RTX_UNCHANGING_P (xtarget) = 1;
4959 clear_storage (xtarget, GEN_INT (size));
4964 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4966 /* Store each element of the constructor into
4967 the corresponding field of TARGET. */
4969 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4971 tree field = TREE_PURPOSE (elt);
4972 tree value = TREE_VALUE (elt);
4973 enum machine_mode mode;
4974 HOST_WIDE_INT bitsize;
4975 HOST_WIDE_INT bitpos = 0;
4977 rtx to_rtx = target;
4979 /* Just ignore missing fields.
4980 We cleared the whole structure, above,
4981 if any fields are missing. */
4985 if (cleared && is_zeros_p (value))
4988 if (host_integerp (DECL_SIZE (field), 1))
4989 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4993 mode = DECL_MODE (field);
4994 if (DECL_BIT_FIELD (field))
4997 offset = DECL_FIELD_OFFSET (field);
4998 if (host_integerp (offset, 0)
4999 && host_integerp (bit_position (field), 0))
5001 bitpos = int_bit_position (field);
5005 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
5011 if (CONTAINS_PLACEHOLDER_P (offset))
5012 offset = build (WITH_RECORD_EXPR, sizetype,
5013 offset, make_tree (TREE_TYPE (exp), target));
5015 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
5016 if (GET_CODE (to_rtx) != MEM)
5019 #ifdef POINTERS_EXTEND_UNSIGNED
5020 if (GET_MODE (offset_rtx) != Pmode)
5021 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
5023 if (GET_MODE (offset_rtx) != ptr_mode)
5024 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
5027 to_rtx = offset_address (to_rtx, offset_rtx,
5028 highest_pow2_factor (offset));
5031 if (TREE_READONLY (field))
5033 if (GET_CODE (to_rtx) == MEM)
5034 to_rtx = copy_rtx (to_rtx);
5036 RTX_UNCHANGING_P (to_rtx) = 1;
5039 #ifdef WORD_REGISTER_OPERATIONS
5040 /* If this initializes a field that is smaller than a word, at the
5041 start of a word, try to widen it to a full word.
5042 This special case allows us to output C++ member function
5043 initializations in a form that the optimizers can understand. */
5044 if (GET_CODE (target) == REG
5045 && bitsize < BITS_PER_WORD
5046 && bitpos % BITS_PER_WORD == 0
5047 && GET_MODE_CLASS (mode) == MODE_INT
5048 && TREE_CODE (value) == INTEGER_CST
5050 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5052 tree type = TREE_TYPE (value);
5054 if (TYPE_PRECISION (type) < BITS_PER_WORD)
5056 type = (*lang_hooks.types.type_for_size)
5057 (BITS_PER_WORD, TREE_UNSIGNED (type));
5058 value = convert (type, value);
5061 if (BYTES_BIG_ENDIAN)
5063 = fold (build (LSHIFT_EXPR, type, value,
5064 build_int_2 (BITS_PER_WORD - bitsize, 0)));
5065 bitsize = BITS_PER_WORD;
5070 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5071 && DECL_NONADDRESSABLE_P (field))
5073 to_rtx = copy_rtx (to_rtx);
5074 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5077 store_constructor_field (to_rtx, bitsize, bitpos, mode,
5078 value, type, cleared,
5079 get_alias_set (TREE_TYPE (field)));
5082 else if (TREE_CODE (type) == ARRAY_TYPE
5083 || TREE_CODE (type) == VECTOR_TYPE)
5088 tree domain = TYPE_DOMAIN (type);
5089 tree elttype = TREE_TYPE (type);
5091 HOST_WIDE_INT minelt = 0;
5092 HOST_WIDE_INT maxelt = 0;
5094 /* Vectors are like arrays, but the domain is stored via an array
5095 type indirectly. */
5096 if (TREE_CODE (type) == VECTOR_TYPE)
5098 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
5099 the same field as TYPE_DOMAIN, we are not guaranteed that
5100 it always will. */
5101 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
5102 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
5105 const_bounds_p = (TYPE_MIN_VALUE (domain)
5106 && TYPE_MAX_VALUE (domain)
5107 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5108 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5110 /* If we have constant bounds for the range of the type, get them. */
5113 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5114 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5117 /* If the constructor has fewer elements than the array,
5118 clear the whole array first. Similarly if this is
5119 a static constructor of a non-BLKmode object. */
5120 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
5124 HOST_WIDE_INT count = 0, zero_count = 0;
5125 need_to_clear = ! const_bounds_p;
5127 /* This loop is a more accurate version of the loop in
5128 mostly_zeros_p (it handles RANGE_EXPR in an index).
5129 It is also needed to check for missing elements. */
5130 for (elt = CONSTRUCTOR_ELTS (exp);
5131 elt != NULL_TREE && ! need_to_clear;
5132 elt = TREE_CHAIN (elt))
5134 tree index = TREE_PURPOSE (elt);
5135 HOST_WIDE_INT this_node_count;
5137 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5139 tree lo_index = TREE_OPERAND (index, 0);
5140 tree hi_index = TREE_OPERAND (index, 1);
5142 if (! host_integerp (lo_index, 1)
5143 || ! host_integerp (hi_index, 1))
5149 this_node_count = (tree_low_cst (hi_index, 1)
5150 - tree_low_cst (lo_index, 1) + 1);
5153 this_node_count = 1;
5155 count += this_node_count;
5156 if (mostly_zeros_p (TREE_VALUE (elt)))
5157 zero_count += this_node_count;
5160 /* Clear the entire array first if there are any missing elements,
5161 or if the incidence of zero elements is >= 75%. */
5163 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
5167 if (need_to_clear && size > 0)
5172 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5174 clear_storage (target, GEN_INT (size));
5178 else if (REG_P (target))
5179 /* Inform later passes that the old value is dead. */
5180 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
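/* Editorial worked example of the count above: for the hypothetical
   GNU C initializer `int a[8] = { [0 ... 5] = 1, [6] = 2 }' the
   RANGE_EXPR contributes this_node_count = 6 and the plain index
   1 more, so count = 7 < maxelt - minelt + 1 = 8; element 7 is
   missing and the whole array is cleared first.  */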
5182 /* Store each element of the constructor into
5183 the corresponding element of TARGET, determined
5184 by counting the elements. */
5185 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
5187 elt = TREE_CHAIN (elt), i++)
5189 enum machine_mode mode;
5190 HOST_WIDE_INT bitsize;
5191 HOST_WIDE_INT bitpos;
5193 tree value = TREE_VALUE (elt);
5194 tree index = TREE_PURPOSE (elt);
5195 rtx xtarget = target;
5197 if (cleared && is_zeros_p (value))
5200 unsignedp = TREE_UNSIGNED (elttype);
5201 mode = TYPE_MODE (elttype);
5202 if (mode == BLKmode)
5203 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5204 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5207 bitsize = GET_MODE_BITSIZE (mode);
5209 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5211 tree lo_index = TREE_OPERAND (index, 0);
5212 tree hi_index = TREE_OPERAND (index, 1);
5213 rtx index_r, pos_rtx, loop_end;
5214 struct nesting *loop;
5215 HOST_WIDE_INT lo, hi, count;
5218 /* If the range is constant and "small", unroll the loop. */
5220 && host_integerp (lo_index, 0)
5221 && host_integerp (hi_index, 0)
5222 && (lo = tree_low_cst (lo_index, 0),
5223 hi = tree_low_cst (hi_index, 0),
5224 count = hi - lo + 1,
5225 (GET_CODE (target) != MEM
5227 || (host_integerp (TYPE_SIZE (elttype), 1)
5228 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5231 lo -= minelt; hi -= minelt;
5232 for (; lo <= hi; lo++)
5234 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5236 if (GET_CODE (target) == MEM
5237 && !MEM_KEEP_ALIAS_SET_P (target)
5238 && TREE_CODE (type) == ARRAY_TYPE
5239 && TYPE_NONALIASED_COMPONENT (type))
5241 target = copy_rtx (target);
5242 MEM_KEEP_ALIAS_SET_P (target) = 1;
5245 store_constructor_field
5246 (target, bitsize, bitpos, mode, value, type, cleared,
5247 get_alias_set (elttype));
5252 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
5253 loop_end = gen_label_rtx ();
5255 unsignedp = TREE_UNSIGNED (domain);
5257 index = build_decl (VAR_DECL, NULL_TREE, domain);
5260 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5262 SET_DECL_RTL (index, index_r);
5263 if (TREE_CODE (value) == SAVE_EXPR
5264 && SAVE_EXPR_RTL (value) == 0)
5266 /* Make sure value gets expanded once before the
5267 loop. */
5268 expand_expr (value, const0_rtx, VOIDmode, 0);
5271 store_expr (lo_index, index_r, 0);
5272 loop = expand_start_loop (0);
5274 /* Assign value to element index. */
5276 = convert (ssizetype,
5277 fold (build (MINUS_EXPR, TREE_TYPE (index),
5278 index, TYPE_MIN_VALUE (domain))));
5279 position = size_binop (MULT_EXPR, position,
5281 TYPE_SIZE_UNIT (elttype)));
5283 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
5284 xtarget = offset_address (target, pos_rtx,
5285 highest_pow2_factor (position));
5286 xtarget = adjust_address (xtarget, mode, 0);
5287 if (TREE_CODE (value) == CONSTRUCTOR)
5288 store_constructor (value, xtarget, cleared,
5289 bitsize / BITS_PER_UNIT);
5291 store_expr (value, xtarget, 0);
5293 expand_exit_loop_if_false (loop,
5294 build (LT_EXPR, integer_type_node,
5297 expand_increment (build (PREINCREMENT_EXPR,
5299 index, integer_one_node), 0, 0);
5301 emit_label (loop_end);
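/* Editorial sketch: when the range is not unrolled, the rtl
   generated above behaves like
       i = lo_index;
       for (;;)
         {
           a[i] = value;
           if (!(i < hi_index))
             break;
           ++i;
         }
   with each store addressed via offset_address at element
   (i - min_index).  */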
5304 else if ((index != 0 && ! host_integerp (index, 0))
5305 || ! host_integerp (TYPE_SIZE (elttype), 1))
5310 index = ssize_int (1);
5313 index = convert (ssizetype,
5314 fold (build (MINUS_EXPR, index,
5315 TYPE_MIN_VALUE (domain))));
5317 position = size_binop (MULT_EXPR, index,
5319 TYPE_SIZE_UNIT (elttype)));
5320 xtarget = offset_address (target,
5321 expand_expr (position, 0, VOIDmode, 0),
5322 highest_pow2_factor (position));
5323 xtarget = adjust_address (xtarget, mode, 0);
5324 store_expr (value, xtarget, 0);
5329 bitpos = ((tree_low_cst (index, 0) - minelt)
5330 * tree_low_cst (TYPE_SIZE (elttype), 1));
5332 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5334 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
5335 && TREE_CODE (type) == ARRAY_TYPE
5336 && TYPE_NONALIASED_COMPONENT (type))
5338 target = copy_rtx (target);
5339 MEM_KEEP_ALIAS_SET_P (target) = 1;
5342 store_constructor_field (target, bitsize, bitpos, mode, value,
5343 type, cleared, get_alias_set (elttype));
5349 /* Set constructor assignments. */
5350 else if (TREE_CODE (type) == SET_TYPE)
5352 tree elt = CONSTRUCTOR_ELTS (exp);
5353 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
5354 tree domain = TYPE_DOMAIN (type);
5355 tree domain_min, domain_max, bitlength;
5357 /* The default implementation strategy is to extract the constant
5358 parts of the constructor, use that to initialize the target,
5359 and then "or" in whatever non-constant ranges we need in addition.
5361 If a large set is all zero or all ones, it is
5362 probably better to set it using memset (if available) or bzero.
5363 Also, if a large set has just a single range, it may also be
5364 better to first clear the whole set (using
5365 bzero/memset), and then set the bits we want. */
5367 /* Check for all zeros. */
5368 if (elt == NULL_TREE && size > 0)
5371 clear_storage (target, GEN_INT (size));
5375 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
5376 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
5377 bitlength = size_binop (PLUS_EXPR,
5378 size_diffop (domain_max, domain_min),
5381 nbits = tree_low_cst (bitlength, 1);
5383 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
5384 are "complicated" (more than one range), initialize (the
5385 constant parts) by copying from a constant. */
5386 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
5387 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
5389 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
5390 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
5391 char *bit_buffer = (char *) alloca (nbits);
5392 HOST_WIDE_INT word = 0;
5393 unsigned int bit_pos = 0;
5394 unsigned int ibit = 0;
5395 unsigned int offset = 0; /* In bytes from beginning of set. */
5397 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
5400 if (bit_buffer[ibit])
5402 if (BYTES_BIG_ENDIAN)
5403 word |= (1 << (set_word_size - 1 - bit_pos));
5405 word |= 1 << bit_pos;
5409 if (bit_pos >= set_word_size || ibit == nbits)
5411 if (word != 0 || ! cleared)
5413 rtx datum = GEN_INT (word);
5416 /* The assumption here is that it is safe to use
5417 XEXP if the set is multi-word, but not if
5418 it's single-word. */
5419 if (GET_CODE (target) == MEM)
5420 to_rtx = adjust_address (target, mode, offset);
5421 else if (offset == 0)
5425 emit_move_insn (to_rtx, datum);
5432 offset += set_word_size / BITS_PER_UNIT;
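/* Editorial worked example: with set_word_size == 8 and member
   bits 1 and 3 set, the loop above accumulates
   word == (1 << 1) | (1 << 3) == 0x0a, or
   (1 << 6) | (1 << 4) == 0x50 when BYTES_BIG_ENDIAN, before moving
   the word out and advancing OFFSET by one byte.  */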
5437 /* Don't bother clearing storage if the set is all ones. */
5438 if (TREE_CHAIN (elt) != NULL_TREE
5439 || (TREE_PURPOSE (elt) == NULL_TREE
5441 : ( ! host_integerp (TREE_VALUE (elt), 0)
5442 || ! host_integerp (TREE_PURPOSE (elt), 0)
5443 || (tree_low_cst (TREE_VALUE (elt), 0)
5444 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5445 != (HOST_WIDE_INT) nbits))))
5446 clear_storage (target, expr_size (exp));
5448 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5450 /* Start of range of element or NULL. */
5451 tree startbit = TREE_PURPOSE (elt);
5452 /* End of range of element, or element value. */
5453 tree endbit = TREE_VALUE (elt);
5454 HOST_WIDE_INT startb, endb;
5455 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5457 bitlength_rtx = expand_expr (bitlength,
5458 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5460 /* Handle non-range tuple element like [ expr ]. */
5461 if (startbit == NULL_TREE)
5463 startbit = save_expr (endbit);
5467 startbit = convert (sizetype, startbit);
5468 endbit = convert (sizetype, endbit);
5469 if (! integer_zerop (domain_min))
5471 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5472 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5474 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5475 EXPAND_CONST_ADDRESS);
5476 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5477 EXPAND_CONST_ADDRESS);
5483 ((build_qualified_type ((*lang_hooks.types.type_for_mode)
5484 (GET_MODE (target), 0),
5487 emit_move_insn (targetx, target);
5490 else if (GET_CODE (target) == MEM)
5495 /* Optimization: If startbit and endbit are constants divisible
5496 by BITS_PER_UNIT, call memset instead. */
5497 if (TARGET_MEM_FUNCTIONS
5498 && TREE_CODE (startbit) == INTEGER_CST
5499 && TREE_CODE (endbit) == INTEGER_CST
5500 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5501 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5503 emit_library_call (memset_libfunc, LCT_NORMAL,
5505 plus_constant (XEXP (targetx, 0),
5506 startb / BITS_PER_UNIT),
5508 constm1_rtx, TYPE_MODE (integer_type_node),
5509 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5510 TYPE_MODE (sizetype));
5513 emit_library_call (setbits_libfunc, LCT_NORMAL,
5514 VOIDmode, 4, XEXP (targetx, 0),
5515 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5516 startbit_rtx, TYPE_MODE (sizetype),
5517 endbit_rtx, TYPE_MODE (sizetype));
5520 emit_move_insn (target, targetx);
5528 /* Store the value of EXP (an expression tree)
5529 into a subfield of TARGET which has mode MODE and occupies
5530 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5531 If MODE is VOIDmode, it means that we are storing into a bit-field.
5533 If VALUE_MODE is VOIDmode, return nothing in particular.
5534 UNSIGNEDP is not used in this case.
5536 Otherwise, return an rtx for the value stored. This rtx
5537 has mode VALUE_MODE if that is convenient to do.
5538 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5540 TYPE is the type of the underlying object.
5542 ALIAS_SET is the alias set for the destination. This value will
5543 (in general) be different from that for TARGET, since TARGET is a
5544 reference to the containing structure. */
5547 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5548 enum machine_mode mode, tree exp, enum machine_mode value_mode,
5549 int unsignedp, tree type, int alias_set)
5551 HOST_WIDE_INT width_mask = 0;
5553 if (TREE_CODE (exp) == ERROR_MARK)
5556 /* If we have nothing to store, do nothing unless the expression has
5557 side-effects. */
5559 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5560 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5561 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5563 /* If we are storing into an unaligned field of an aligned union that is
5564 in a register, we may have the mode of TARGET being an integer mode but
5565 MODE == BLKmode. In that case, get an aligned object whose size and
5566 alignment are the same as TARGET and store TARGET into it (we can avoid
5567 the store if the field being stored is the entire width of TARGET). Then
5568 call ourselves recursively to store the field into a BLKmode version of
5569 that object. Finally, load from the object into TARGET. This is not
5570 very efficient in general, but should only be slightly more expensive
5571 than the otherwise-required unaligned accesses. Perhaps this can be
5572 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5573 twice, once with emit_move_insn and once via store_field. */
5576 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5578 rtx object = assign_temp (type, 0, 1, 1);
5579 rtx blk_object = adjust_address (object, BLKmode, 0);
5581 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5582 emit_move_insn (object, target);
5584 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5587 emit_move_insn (target, object);
5589 /* We want to return the BLKmode version of the data. */
5593 if (GET_CODE (target) == CONCAT)
5595 /* We're storing into a struct containing a single __complex. */
5599 return store_expr (exp, target, 0);
5602 /* If the structure is in a register or if the component
5603 is a bit field, we cannot use addressing to access it.
5604 Use bit-field techniques or SUBREG to store in it. */
5606 if (mode == VOIDmode
5607 || (mode != BLKmode && ! direct_store[(int) mode]
5608 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5609 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5610 || GET_CODE (target) == REG
5611 || GET_CODE (target) == SUBREG
5612 /* If the field isn't aligned enough to store as an ordinary memref,
5613 store it as a bit field. */
5615 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5616 || bitpos % GET_MODE_ALIGNMENT (mode))
5617 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5618 || (bitpos % BITS_PER_UNIT != 0)))
5619 /* If the RHS and field are a constant size and the size of the
5620 RHS isn't the same size as the bitfield, we must use bitfield
5623 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5624 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5626 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5628 /* If BITSIZE is narrower than the size of the type of EXP
5629 we will be narrowing TEMP. Normally, what's wanted are the
5630 low-order bits. However, if EXP's type is a record and this is a
5631 big-endian machine, we want the upper BITSIZE bits. */
5632 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5633 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5634 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5635 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5636 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5640 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5641 MODE. */
5642 if (mode != VOIDmode && mode != BLKmode
5643 && mode != TYPE_MODE (TREE_TYPE (exp)))
5644 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5646 /* If the modes of TARGET and TEMP are both BLKmode, both
5647 must be in memory and BITPOS must be aligned on a byte
5648 boundary. If so, we simply do a block copy. */
5649 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5651 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5652 || bitpos % BITS_PER_UNIT != 0)
5655 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5656 emit_block_move (target, temp,
5657 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5661 return value_mode == VOIDmode ? const0_rtx : target;
5664 /* Store the value in the bitfield. */
5665 store_bit_field (target, bitsize, bitpos, mode, temp,
5666 int_size_in_bytes (type));
5668 if (value_mode != VOIDmode)
5670 /* The caller wants an rtx for the value.
5671 If possible, avoid refetching from the bitfield itself. */
5673 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5676 enum machine_mode tmode;
5678 tmode = GET_MODE (temp);
5679 if (tmode == VOIDmode)
5683 return expand_and (tmode, temp,
5684 gen_int_mode (width_mask, tmode),
5687 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5688 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5689 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
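/* Editorial worked example: for a signed 5-bit field in a 32-bit
   TMODE, width_mask is (1 << 5) - 1 == 0x1f and COUNT is
   32 - 5 == 27; shifting left by 27 and then arithmetic right by 27
   sign-extends the field, whereas the unsigned case above only
   masks with 0x1f.  */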
5692 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5693 NULL_RTX, value_mode, VOIDmode,
5694 int_size_in_bytes (type));
5700 rtx addr = XEXP (target, 0);
5701 rtx to_rtx = target;
5703 /* If a value is wanted, it must be the lhs;
5704 so make the address stable for multiple use. */
5706 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5707 && ! CONSTANT_ADDRESS_P (addr)
5708 /* A frame-pointer reference is already stable. */
5709 && ! (GET_CODE (addr) == PLUS
5710 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5711 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5712 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5713 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5715 /* Now build a reference to just the desired component. */
5717 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5719 if (to_rtx == target)
5720 to_rtx = copy_rtx (to_rtx);
5722 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5723 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5724 set_mem_alias_set (to_rtx, alias_set);
5726 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5730 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5731 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5732 codes and find the ultimate containing object, which we return.
5734 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5735 bit position, and *PUNSIGNEDP to the signedness of the field.
5736 If the position of the field is variable, we store a tree
5737 giving the variable offset (in units) in *POFFSET.
5738 This offset is in addition to the bit position.
5739 If the position is not variable, we store 0 in *POFFSET.
5741 If any of the extraction expressions is volatile,
5742 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5744 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5745 is a mode that can be used to access the field. In that case, *PBITSIZE
5748 If the field describes a variable-sized object, *PMODE is set to
5749 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5750 this case, but the address of the object can be found. */
5753 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5754 HOST_WIDE_INT *pbitpos, tree *poffset,
5755 enum machine_mode *pmode, int *punsignedp,
5759 enum machine_mode mode = VOIDmode;
5760 tree offset = size_zero_node;
5761 tree bit_offset = bitsize_zero_node;
5762 tree placeholder_ptr = 0;
5765 /* First get the mode, signedness, and size. We do this from just the
5766 outermost expression. */
5767 if (TREE_CODE (exp) == COMPONENT_REF)
5769 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5770 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5771 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5773 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5775 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5777 size_tree = TREE_OPERAND (exp, 1);
5778 *punsignedp = TREE_UNSIGNED (exp);
5782 mode = TYPE_MODE (TREE_TYPE (exp));
5783 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5785 if (mode == BLKmode)
5786 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5788 *pbitsize = GET_MODE_BITSIZE (mode);
5793 if (! host_integerp (size_tree, 1))
5794 mode = BLKmode, *pbitsize = -1;
5796 *pbitsize = tree_low_cst (size_tree, 1);
5799 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5800 and find the ultimate containing object. */
5803 if (TREE_CODE (exp) == BIT_FIELD_REF)
5804 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5805 else if (TREE_CODE (exp) == COMPONENT_REF)
5807 tree field = TREE_OPERAND (exp, 1);
5808 tree this_offset = DECL_FIELD_OFFSET (field);
5810 /* If this field hasn't been filled in yet, don't go
5811 past it. This should only happen when folding expressions
5812 made during type construction. */
5813 if (this_offset == 0)
5815 else if (CONTAINS_PLACEHOLDER_P (this_offset))
5816 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5818 offset = size_binop (PLUS_EXPR, offset, this_offset);
5819 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5820 DECL_FIELD_BIT_OFFSET (field));
5822 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5825 else if (TREE_CODE (exp) == ARRAY_REF
5826 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5828 tree index = TREE_OPERAND (exp, 1);
5829 tree array = TREE_OPERAND (exp, 0);
5830 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5831 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5832 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5834 /* We assume all arrays have sizes that are a multiple of a byte.
5835 First subtract the lower bound, if any, in the type of the
5836 index, then convert to sizetype and multiply by the size of the
5837 element. */
5838 if (low_bound != 0 && ! integer_zerop (low_bound))
5839 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5842 /* If the index has a self-referential type, pass it to a
5843 WITH_RECORD_EXPR; if the component size is self-referential, pass our
5844 component to one. */
5845 if (CONTAINS_PLACEHOLDER_P (index))
5846 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5847 if (CONTAINS_PLACEHOLDER_P (unit_size))
5848 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5850 offset = size_binop (PLUS_EXPR, offset,
5851 size_binop (MULT_EXPR,
5852 convert (sizetype, index),
5856 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5858 tree new = find_placeholder (exp, &placeholder_ptr);
5860 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5861 We might have been called from tree optimization where we
5862 haven't set up an object yet. */
5871 /* We can go inside most conversions: all NON_LVALUE_EXPRs, all normal
5872 conversions that don't change the mode, and all view conversions
5873 except those that need to "step up" the alignment. */
5874 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5875 && ! (TREE_CODE (exp) == VIEW_CONVERT_EXPR
5876 && ! ((TYPE_ALIGN (TREE_TYPE (exp))
5877 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5879 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5880 < BIGGEST_ALIGNMENT)
5881 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5882 || TYPE_ALIGN_OK (TREE_TYPE
5883 (TREE_OPERAND (exp, 0))))))
5884 && ! ((TREE_CODE (exp) == NOP_EXPR
5885 || TREE_CODE (exp) == CONVERT_EXPR)
5886 && (TYPE_MODE (TREE_TYPE (exp))
5887 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5890 /* If any reference in the chain is volatile, the effect is volatile. */
5891 if (TREE_THIS_VOLATILE (exp))
5894 exp = TREE_OPERAND (exp, 0);
5897 /* If OFFSET is constant, see if we can return the whole thing as a
5898 constant bit position. Otherwise, split it up. */
5899 if (host_integerp (offset, 0)
5900 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5902 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5903 && host_integerp (tem, 0))
5904 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5906 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5912 /* Return 1 if T is an expression that get_inner_reference handles. */
5915 handled_component_p (tree t)
5917 switch (TREE_CODE (t))
5922 case ARRAY_RANGE_REF:
5923 case NON_LVALUE_EXPR:
5924 case VIEW_CONVERT_EXPR:
5927 /* ??? Sure they are handled, but get_inner_reference may return
5928 a different PBITSIZE, depending upon whether the expression is
5929 wrapped up in a NOP_EXPR or not, e.g. for bitfields. */
5932 return (TYPE_MODE (TREE_TYPE (t))
5933 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5940 /* Given an rtx VALUE that may contain additions and multiplications, return
5941 an equivalent value that just refers to a register, memory, or constant.
5942 This is done by generating instructions to perform the arithmetic and
5943 returning a pseudo-register containing the value.
5945 The returned value may be a REG, SUBREG, MEM or constant. */
5948 force_operand (rtx value, rtx target)
5951 /* Use subtarget as the target for operand 0 of a binary operation. */
5952 rtx subtarget = get_subtarget (target);
5953 enum rtx_code code = GET_CODE (value);
5955 /* Check for a PIC address load. */
5956 if ((code == PLUS || code == MINUS)
5957 && XEXP (value, 0) == pic_offset_table_rtx
5958 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5959 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5960 || GET_CODE (XEXP (value, 1)) == CONST))
5963 subtarget = gen_reg_rtx (GET_MODE (value));
5964 emit_move_insn (subtarget, value);
5968 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5971 target = gen_reg_rtx (GET_MODE (value));
5972 convert_move (target, force_operand (XEXP (value, 0), NULL),
5973 code == ZERO_EXTEND);
5977 if (GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
5979 op2 = XEXP (value, 1);
5980 if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
5982 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5985 op2 = negate_rtx (GET_MODE (value), op2);
5988 /* Check for an addition with OP2 a constant integer and our first
5989 operand a PLUS of a virtual register and something else. In that
5990 case, we want to emit the sum of the virtual register and the
5991 constant first and then add the other value. This allows virtual
5992 register instantiation to simply modify the constant rather than
5993 creating another one around this addition. */
5994 if (code == PLUS && GET_CODE (op2) == CONST_INT
5995 && GET_CODE (XEXP (value, 0)) == PLUS
5996 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5997 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5998 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
6000 rtx temp = expand_simple_binop (GET_MODE (value), code,
6001 XEXP (XEXP (value, 0), 0), op2,
6002 subtarget, 0, OPTAB_LIB_WIDEN);
6003 return expand_simple_binop (GET_MODE (value), code, temp,
6004 force_operand (XEXP (XEXP (value,
6006 target, 0, OPTAB_LIB_WIDEN);
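/* Editorial sketch: given, say,
   (plus (plus (reg virtual-stack-vars) (reg 117)) (const_int 4))
   (the pseudo number is made up), the code above first emits
   virtual-stack-vars + 4, which instantiation can rewrite as a
   plain offset, and only then adds in the force_operand'd inner
   operand.  */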
6009 op1 = force_operand (XEXP (value, 0), subtarget);
6010 op2 = force_operand (op2, NULL_RTX);
6014 return expand_mult (GET_MODE (value), op1, op2, target, 1);
6016 if (!INTEGRAL_MODE_P (GET_MODE (value)))
6017 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6018 target, 1, OPTAB_LIB_WIDEN);
6020 return expand_divmod (0,
6021 FLOAT_MODE_P (GET_MODE (value))
6022 ? RDIV_EXPR : TRUNC_DIV_EXPR,
6023 GET_MODE (value), op1, op2, target, 0);
6026 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6030 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
6034 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6038 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6039 target, 0, OPTAB_LIB_WIDEN);
6042 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6043 target, 1, OPTAB_LIB_WIDEN);
6046 if (GET_RTX_CLASS (code) == '1')
6048 op1 = force_operand (XEXP (value, 0), NULL_RTX);
6049 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
6052 #ifdef INSN_SCHEDULING
6053 /* On machines that have insn scheduling, we want all memory references to be
6054 explicit, so we need to deal with such paradoxical SUBREGs. */
6055 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
6056 && (GET_MODE_SIZE (GET_MODE (value))
6057 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
6059 = simplify_gen_subreg (GET_MODE (value),
6060 force_reg (GET_MODE (SUBREG_REG (value)),
6061 force_operand (SUBREG_REG (value),
6063 GET_MODE (SUBREG_REG (value)),
6064 SUBREG_BYTE (value));
6070 /* Subroutine of expand_expr: return nonzero iff there is no way that
6071 EXP can reference X, which is being modified. TOP_P is nonzero if this
6072 call is going to be used to determine whether we need a temporary
6073 for EXP, as opposed to a recursive call to this function.
6075 It is always safe for this routine to return zero since it merely
6076 searches for optimization opportunities. */
6079 safe_from_p (rtx x, tree exp, int top_p)
6083 static tree save_expr_list;
6086 /* If EXP has varying size, we MUST use a target since we currently
6087 have no way of allocating temporaries of variable size
6088 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6089 So we assume here that something at a higher level has prevented a
6090 clash. This is somewhat bogus, but the best we can do. Only
6091 do this when X is BLKmode and when we are at the top level. */
6092 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6093 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
6094 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
6095 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
6096 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
6098 && GET_MODE (x) == BLKmode)
6099 /* If X is in the outgoing argument area, it is always safe. */
6100 || (GET_CODE (x) == MEM
6101 && (XEXP (x, 0) == virtual_outgoing_args_rtx
6102 || (GET_CODE (XEXP (x, 0)) == PLUS
6103 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
6106 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
6107 find the underlying pseudo. */
6108 if (GET_CODE (x) == SUBREG)
6111 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6115 /* A SAVE_EXPR might appear many times in the expression passed to the
6116 top-level safe_from_p call, and if it has a complex subexpression,
6117 examining it multiple times could result in a combinatorial explosion.
6118 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
6119 with optimization took about 28 minutes to compile -- even though it was
6120 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
6121 and turn that off when we are done. We keep a list of the SAVE_EXPRs
6122 we have processed. Note that the only test of top_p was above. */
6131 rtn = safe_from_p (x, exp, 0);
6133 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
6134 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
6139 /* Now look at our tree code and possibly recurse. */
6140 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6143 exp_rtl = DECL_RTL_IF_SET (exp);
6150 if (TREE_CODE (exp) == TREE_LIST)
6154 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
6156 exp = TREE_CHAIN (exp);
6159 if (TREE_CODE (exp) != TREE_LIST)
6160 return safe_from_p (x, exp, 0);
6163 else if (TREE_CODE (exp) == ERROR_MARK)
6164 return 1; /* An already-visited SAVE_EXPR? */
6170 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6175 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6179 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6180 the expression. If it is set, we conflict iff we are that rtx or
6181 both are in memory. Otherwise, we check all operands of the
6182 expression recursively. */
6184 switch (TREE_CODE (exp))
6187 /* If the operand is static or we are static, we can't conflict.
6188 Likewise if we don't conflict with the operand at all. */
6189 if (staticp (TREE_OPERAND (exp, 0))
6190 || TREE_STATIC (exp)
6191 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6194 /* Otherwise, the only way this can conflict is if we are taking
6195 the address of a DECL and that address is part of X, which is
6196 very rare. */
6197 exp = TREE_OPERAND (exp, 0);
6200 if (!DECL_RTL_SET_P (exp)
6201 || GET_CODE (DECL_RTL (exp)) != MEM)
6204 exp_rtl = XEXP (DECL_RTL (exp), 0);
6209 if (GET_CODE (x) == MEM
6210 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6211 get_alias_set (exp)))
6216 /* Assume that the call will clobber all hard registers and
6217 all of memory. */
6218 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6219 || GET_CODE (x) == MEM)
6224 /* If a sequence exists, we would have to scan every instruction
6225 in the sequence to see if it was safe. This is probably not
6226 worthwhile. */
6227 if (RTL_EXPR_SEQUENCE (exp))
6230 exp_rtl = RTL_EXPR_RTL (exp);
6233 case WITH_CLEANUP_EXPR:
6234 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
6237 case CLEANUP_POINT_EXPR:
6238 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6241 exp_rtl = SAVE_EXPR_RTL (exp);
6245 /* If we've already scanned this, don't do it again. Otherwise,
6246 show we've scanned it and record for clearing the flag if we're
6247 going on. */
6248 if (TREE_PRIVATE (exp))
6251 TREE_PRIVATE (exp) = 1;
6252 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6254 TREE_PRIVATE (exp) = 0;
6258 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
6262 /* The only operand we look at is operand 1. The rest aren't
6263 part of the expression. */
6264 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
6266 case METHOD_CALL_EXPR:
6267 /* This takes an rtx argument, but shouldn't appear here. */
6274 /* If we have an rtx, we do not need to scan our operands. */
6278 nops = first_rtl_op (TREE_CODE (exp));
6279 for (i = 0; i < nops; i++)
6280 if (TREE_OPERAND (exp, i) != 0
6281 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6284 /* If this is a language-specific tree code, it may require
6285 special handling. */
6286 if ((unsigned int) TREE_CODE (exp)
6287 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
6288 && !(*lang_hooks.safe_from_p) (x, exp))
6289 return 0;
6292 /* If we have an rtl, find any enclosed object. Then see if we conflict with it. */
6296 if (GET_CODE (exp_rtl) == SUBREG)
6298 exp_rtl = SUBREG_REG (exp_rtl);
6299 if (GET_CODE (exp_rtl) == REG
6300 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6301 return 0;
6304 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6305 are memory and they conflict. */
6306 return ! (rtx_equal_p (x, exp_rtl)
6307 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
6308 && true_dependence (exp_rtl, VOIDmode, x,
6309 rtx_addr_varies_p)));
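/* A worked illustration of the test above (editorial, not from the
   original source): if X is (reg 100) and EXP_RTL is (reg 100),
   rtx_equal_p makes the expression unsafe; two distinct pseudos are
   safe; and two MEMs are unsafe only when true_dependence reports
   that the references could overlap.  */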
6312 /* If we reach here, it is safe. */
6316 /* Subroutine of expand_expr: return rtx if EXP is a
6317 variable or parameter; else return 0. */
6323 switch (TREE_CODE (exp))
6327 return DECL_RTL (exp);
6333 #ifdef MAX_INTEGER_COMPUTATION_MODE
6335 void
6336 check_max_integer_computation_mode (tree exp)
6338 enum tree_code code;
6339 enum machine_mode mode;
6341 /* Strip any NOPs that don't change the mode. */
6343 code = TREE_CODE (exp);
6345 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
6346 if (code == NOP_EXPR
6347 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
6350 /* First check the type of the overall operation. We need only look at
6351 unary, binary and relational operations. */
6352 if (TREE_CODE_CLASS (code) == '1'
6353 || TREE_CODE_CLASS (code) == '2'
6354 || TREE_CODE_CLASS (code) == '<')
6356 mode = TYPE_MODE (TREE_TYPE (exp));
6357 if (GET_MODE_CLASS (mode) == MODE_INT
6358 && mode > MAX_INTEGER_COMPUTATION_MODE)
6359 internal_error ("unsupported wide integer operation");
6362 /* Check operand of a unary op. */
6363 if (TREE_CODE_CLASS (code) == '1')
6365 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6366 if (GET_MODE_CLASS (mode) == MODE_INT
6367 && mode > MAX_INTEGER_COMPUTATION_MODE)
6368 internal_error ("unsupported wide integer operation");
6371 /* Check operands of a binary/comparison op. */
6372 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
6374 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6375 if (GET_MODE_CLASS (mode) == MODE_INT
6376 && mode > MAX_INTEGER_COMPUTATION_MODE)
6377 internal_error ("unsupported wide integer operation");
6379 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
6380 if (GET_MODE_CLASS (mode) == MODE_INT
6381 && mode > MAX_INTEGER_COMPUTATION_MODE)
6382 internal_error ("unsupported wide integer operation");
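/* Illustrative example (editorial, assuming a target that defines
   MAX_INTEGER_COMPUTATION_MODE as SImode): a DImode addition, or a
   unary or binary operand of DImode, would trip one of the
   internal_error calls above, since DImode is an integer mode wider
   than the declared maximum.  */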
6387 /* Return the highest power of two that EXP is known to be a multiple of.
6388 This is used in updating alignment of MEMs in array references. */
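/* A worked example (editorial): for the expression (i * 12) + 8,
   the MULT_EXPR rule below yields MIN (1 * 4, BIGGEST_ALIGNMENT) = 4
   (12 = 4 * 3 and nothing is known about i), and the PLUS_EXPR rule
   then takes MIN (4, 8) = 4, so a MEM indexed by this expression can
   be marked as (at least) 4-byte aligned.  */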
6390 static unsigned HOST_WIDE_INT
6391 highest_pow2_factor (tree exp)
6393 unsigned HOST_WIDE_INT c0, c1;
6395 switch (TREE_CODE (exp))
6398 /* We can find the lowest bit that's a one. If the low
6399 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6400 We need to handle this case since we can find it in a COND_EXPR,
6401 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6402 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6403 later ICE. */
6404 if (TREE_CONSTANT_OVERFLOW (exp))
6405 return BIGGEST_ALIGNMENT;
6408 /* Note: tree_low_cst is intentionally not used here,
6409 we don't care about the upper bits. */
6410 c0 = TREE_INT_CST_LOW (exp);
6411 c0 &= -c0;
6412 return c0 ? c0 : BIGGEST_ALIGNMENT;
6416 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6417 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6418 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6419 return MIN (c0, c1);
6421 case MULT_EXPR:
6422 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6423 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6424 return MIN (c0 * c1, BIGGEST_ALIGNMENT);
6426 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6428 if (integer_pow2p (TREE_OPERAND (exp, 1))
6429 && host_integerp (TREE_OPERAND (exp, 1), 1))
6431 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6432 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6433 return MAX (1, c0 / c1);
6437 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6438 case SAVE_EXPR: case WITH_RECORD_EXPR:
6439 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6442 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6445 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6446 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6447 return MIN (c0, c1);
6456 /* Similar, except that it is known that the expression must be a multiple
6457 of the alignment of TYPE. */
6459 static unsigned HOST_WIDE_INT
6460 highest_pow2_factor_for_type (tree type, tree exp)
6462 unsigned HOST_WIDE_INT type_align, factor;
6464 factor = highest_pow2_factor (exp);
6465 type_align = TYPE_ALIGN (type) / BITS_PER_UNIT;
6466 return MAX (factor, type_align);
6469 /* Return an object on the placeholder list that matches EXP, a
6470 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
6471 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
6472 tree.def. If no such object is found, return 0. If PLIST is nonzero,
6473 it points to a starting position in the placeholder list (zero meaning
6474 the start of the list); on return it is updated to point at the list
6475 entry where the object was found. */
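/* Illustrative sketch (editorial): for a PLACEHOLDER_EXPR of type T,
   a placeholder list entry whose TREE_PURPOSE is an object OBJ of
   type T makes this function return OBJ; if OBJ instead has type
   pointer-to-T, the second scan below returns (INDIRECT_REF OBJ).
   This is how self-referential sizes (used, e.g., by the Ada front
   end) get the "current object" substituted in.  */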
6477 tree
6478 find_placeholder (tree exp, tree *plist)
6480 tree type = TREE_TYPE (exp);
6481 tree placeholder_expr;
6483 for (placeholder_expr
6484 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
6485 placeholder_expr != 0;
6486 placeholder_expr = TREE_CHAIN (placeholder_expr))
6488 tree need_type = TYPE_MAIN_VARIANT (type);
6491 /* Find the outermost reference that is of the type we want. If none,
6492 see if any object has a type that is a pointer to the type we want. */
6494 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6495 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6496 || TREE_CODE (elt) == COND_EXPR)
6497 ? TREE_OPERAND (elt, 1)
6498 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6499 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6500 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6501 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6502 ? TREE_OPERAND (elt, 0) : 0))
6503 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6504 {
6505 if (plist)
6506 *plist = placeholder_expr;
6507 return elt;
6508 }
6510 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6511 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6513 || TREE_CODE (elt) == COND_EXPR)
6514 ? TREE_OPERAND (elt, 1)
6515 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6516 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6517 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6518 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6519 ? TREE_OPERAND (elt, 0) : 0))
6520 if (POINTER_TYPE_P (TREE_TYPE (elt))
6521 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6522 == need_type))
6523 {
6524 if (plist)
6525 *plist = placeholder_expr;
6526 return build1 (INDIRECT_REF, need_type, elt);
6527 }
6533 /* expand_expr: generate code for computing expression EXP.
6534 An rtx for the computed value is returned. The value is never null.
6535 In the case of a void EXP, const0_rtx is returned.
6537 The value may be stored in TARGET if TARGET is nonzero.
6538 TARGET is just a suggestion; callers must assume that
6539 the rtx returned may not be the same as TARGET.
6541 If TARGET is CONST0_RTX, it means that the value will be ignored.
6543 If TMODE is not VOIDmode, it suggests generating the
6544 result in mode TMODE. But this is done only when convenient.
6545 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6546 TMODE is just a suggestion; callers must assume that
6547 the rtx returned may not have mode TMODE.
6549 Note that TARGET may have neither TMODE nor MODE. In that case, it
6550 probably will not be used.
6552 If MODIFIER is EXPAND_SUM then when EXP is an addition
6553 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6554 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6555 products as above, or REG or MEM, or constant.
6556 Ordinarily in such cases we would output mul or add instructions
6557 and then return a pseudo reg containing the sum.
6559 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6560 it also marks a label as absolutely required (it can't be dead).
6561 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6562 This is used for outputting expressions used in initializers.
6564 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6565 with a constant address even if that address is not normally legitimate.
6566 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6568 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6569 a call parameter. Such targets require special care as we haven't yet
6570 marked TARGET so that it's safe from being trashed by libcalls. We
6571 don't want to use TARGET for anything but the final result;
6572 intermediate values must go elsewhere. Additionally, calls to
6573 emit_block_move will be flagged with BLOCK_OP_CALL_PARM. */
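/* A minimal usage sketch (editorial; A and B are assumed to be typed
   trees already built by a front end):

       tree sum = build (PLUS_EXPR, integer_type_node, a, b);
       rtx val = expand_expr (sum, NULL_RTX, VOIDmode, EXPAND_NORMAL);

   VAL may be a REG, a MEM, or a constant; per the contract above, the
   caller must not assume it is TARGET or that it has mode TMODE.  */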
6575 rtx
6576 expand_expr (tree exp, rtx target, enum machine_mode tmode, enum expand_modifier modifier)
6579 tree type = TREE_TYPE (exp);
6580 int unsignedp = TREE_UNSIGNED (type);
6581 enum machine_mode mode;
6582 enum tree_code code = TREE_CODE (exp);
6584 rtx subtarget, original_target;
6588 /* Handle ERROR_MARK before anybody tries to access its type. */
6589 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6591 op0 = CONST0_RTX (tmode);
6597 mode = TYPE_MODE (type);
6598 /* Use subtarget as the target for operand 0 of a binary operation. */
6599 subtarget = get_subtarget (target);
6600 original_target = target;
6601 ignore = (target == const0_rtx
6602 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6603 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6604 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6605 && TREE_CODE (type) == VOID_TYPE));
6607 /* If we are going to ignore this result, we need only do something
6608 if there is a side-effect somewhere in the expression. If there
6609 is, short-circuit the most common cases here. Note that we must
6610 not call expand_expr with anything but const0_rtx in case this
6611 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6615 if (! TREE_SIDE_EFFECTS (exp))
6616 return const0_rtx;
6618 /* Ensure we reference a volatile object even if value is ignored, but
6619 don't do this if all we are doing is taking its address. */
6620 if (TREE_THIS_VOLATILE (exp)
6621 && TREE_CODE (exp) != FUNCTION_DECL
6622 && mode != VOIDmode && mode != BLKmode
6623 && modifier != EXPAND_CONST_ADDRESS)
6625 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6626 if (GET_CODE (temp) == MEM)
6627 temp = copy_to_reg (temp);
6628 return const0_rtx;
6631 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6632 || code == INDIRECT_REF || code == BUFFER_REF)
6633 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6634 modifier);
6636 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6637 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6639 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6640 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6641 return const0_rtx;
6643 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6644 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6645 /* If the second operand has no side effects, just evaluate the first. */
6647 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6648 modifier);
6649 else if (code == BIT_FIELD_REF)
6651 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6652 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6653 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6654 return const0_rtx;
6660 #ifdef MAX_INTEGER_COMPUTATION_MODE
6661 /* Only check stuff here if the mode we want is different from the mode
6662 of the expression; if it's the same, check_max_integer_computation_mode
6663 will handle it. Do we really need to check this stuff at all? */
6666 && GET_MODE (target) != mode
6667 && TREE_CODE (exp) != INTEGER_CST
6668 && TREE_CODE (exp) != PARM_DECL
6669 && TREE_CODE (exp) != ARRAY_REF
6670 && TREE_CODE (exp) != ARRAY_RANGE_REF
6671 && TREE_CODE (exp) != COMPONENT_REF
6672 && TREE_CODE (exp) != BIT_FIELD_REF
6673 && TREE_CODE (exp) != INDIRECT_REF
6674 && TREE_CODE (exp) != CALL_EXPR
6675 && TREE_CODE (exp) != VAR_DECL
6676 && TREE_CODE (exp) != RTL_EXPR)
6678 enum machine_mode mode = GET_MODE (target);
6680 if (GET_MODE_CLASS (mode) == MODE_INT
6681 && mode > MAX_INTEGER_COMPUTATION_MODE)
6682 internal_error ("unsupported wide integer operation");
6686 && TREE_CODE (exp) != INTEGER_CST
6687 && TREE_CODE (exp) != PARM_DECL
6688 && TREE_CODE (exp) != ARRAY_REF
6689 && TREE_CODE (exp) != ARRAY_RANGE_REF
6690 && TREE_CODE (exp) != COMPONENT_REF
6691 && TREE_CODE (exp) != BIT_FIELD_REF
6692 && TREE_CODE (exp) != INDIRECT_REF
6693 && TREE_CODE (exp) != VAR_DECL
6694 && TREE_CODE (exp) != CALL_EXPR
6695 && TREE_CODE (exp) != RTL_EXPR
6696 && GET_MODE_CLASS (tmode) == MODE_INT
6697 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6698 internal_error ("unsupported wide integer operation");
6700 check_max_integer_computation_mode (exp);
6703 /* If we will do cse, generate all results into pseudo registers
6704 since 1) that allows cse to find more things
6705 and 2) otherwise cse could produce an insn the machine
6706 cannot support. An exception is a CONSTRUCTOR into a multi-word
6707 MEM: that's much more likely to be most efficient into the MEM.
6708 Another is a CALL_EXPR which must return in memory. */
6710 if (! cse_not_expected && mode != BLKmode && target
6711 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6712 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
6713 && ! (code == CALL_EXPR && aggregate_value_p (exp)))
6720 tree function = decl_function_context (exp);
6721 /* Labels in containing functions, or labels used from initializers,
6722 must be forced. */
6723 if (modifier == EXPAND_INITIALIZER
6724 || (function != current_function_decl
6725 && function != inline_function_decl
6727 temp = force_label_rtx (exp);
6729 temp = label_rtx (exp);
6731 temp = gen_rtx_MEM (FUNCTION_MODE, gen_rtx_LABEL_REF (Pmode, temp));
6732 if (function != current_function_decl
6733 && function != inline_function_decl && function != 0)
6734 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6739 if (!DECL_RTL_SET_P (exp))
6741 error_with_decl (exp, "prior parameter's size depends on `%s'");
6742 return CONST0_RTX (mode);
6745 /* ... fall through ... */
6748 /* If a static var's type was incomplete when the decl was written,
6749 but the type is complete now, lay out the decl now. */
6750 if (DECL_SIZE (exp) == 0
6751 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6752 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6753 layout_decl (exp, 0);
6755 /* ... fall through ... */
6759 if (DECL_RTL (exp) == 0)
6762 /* Ensure variable marked as used even if it doesn't go through
6763 a parser. If it hasn't been used yet, write out an external
6764 definition. */
6765 if (! TREE_USED (exp))
6767 assemble_external (exp);
6768 TREE_USED (exp) = 1;
6771 /* Show we haven't gotten RTL for this yet. */
6774 /* Handle variables inherited from containing functions. */
6775 context = decl_function_context (exp);
6777 /* We treat inline_function_decl as an alias for the current function
6778 because that is the inline function whose vars, types, etc.
6779 are being merged into the current function.
6780 See expand_inline_function. */
6782 if (context != 0 && context != current_function_decl
6783 && context != inline_function_decl
6784 /* If var is static, we don't need a static chain to access it. */
6785 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6786 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6790 /* Mark as non-local and addressable. */
6791 DECL_NONLOCAL (exp) = 1;
6792 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6794 (*lang_hooks.mark_addressable) (exp);
6795 if (GET_CODE (DECL_RTL (exp)) != MEM)
6797 addr = XEXP (DECL_RTL (exp), 0);
6798 if (GET_CODE (addr) == MEM)
6800 = replace_equiv_address (addr,
6801 fix_lexical_addr (XEXP (addr, 0), exp));
6803 addr = fix_lexical_addr (addr, exp);
6805 temp = replace_equiv_address (DECL_RTL (exp), addr);
6808 /* This is the case of an array whose size is to be determined
6809 from its initializer, while the initializer is still being parsed.
6810 See expand_decl. */
6812 else if (GET_CODE (DECL_RTL (exp)) == MEM
6813 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6814 temp = validize_mem (DECL_RTL (exp));
6816 /* If DECL_RTL is memory, we are in the normal case and either
6817 the address is not valid or it is not a register and -fforce-addr
6818 is specified, get the address into a register. */
6820 else if (GET_CODE (DECL_RTL (exp)) == MEM
6821 && modifier != EXPAND_CONST_ADDRESS
6822 && modifier != EXPAND_SUM
6823 && modifier != EXPAND_INITIALIZER
6824 && (! memory_address_p (DECL_MODE (exp),
6825 XEXP (DECL_RTL (exp), 0))
6827 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6828 temp = replace_equiv_address (DECL_RTL (exp),
6829 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6831 /* If we got something, return it. But first, set the alignment
6832 if the address is a register. */
6835 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6836 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6841 /* If the mode of DECL_RTL does not match that of the decl, it
6842 must be a promoted value. We return a SUBREG of the wanted mode,
6843 but mark it so that we know that it was already extended. */
6845 if (GET_CODE (DECL_RTL (exp)) == REG
6846 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6848 /* Get the signedness used for this variable. Ensure we get the
6849 same mode we got when the variable was declared. */
6850 if (GET_MODE (DECL_RTL (exp))
6851 != promote_mode (type, DECL_MODE (exp), &unsignedp,
6852 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6855 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6856 SUBREG_PROMOTED_VAR_P (temp) = 1;
6857 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6861 return DECL_RTL (exp);
6864 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6865 TREE_INT_CST_HIGH (exp), mode);
6867 /* ??? If overflow is set, fold will have done an incomplete job,
6868 which can result in (plus xx (const_int 0)), which can get
6869 simplified by validate_replace_rtx during virtual register
6870 instantiation, which can result in unrecognizable insns.
6871 Avoid this by forcing all overflows into registers. */
6872 if (TREE_CONSTANT_OVERFLOW (exp)
6873 && modifier != EXPAND_INITIALIZER)
6874 temp = force_reg (mode, temp);
6879 return const_vector_from_tree (exp);
6882 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6885 /* If optimized, generate immediate CONST_DOUBLE
6886 which will be turned into memory by reload if necessary.
6888 We used to force a register so that loop.c could see it. But
6889 this does not allow gen_* patterns to perform optimizations with
6890 the constants. It also produces two insns in cases like "x = 1.0;".
6891 On most machines, floating-point constants are not permitted in
6892 many insns, so we'd end up copying it to a register in any case.
6894 Now, we do the copying in expand_binop, if appropriate. */
6895 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6896 TYPE_MODE (TREE_TYPE (exp)));
6899 /* Handle evaluating a complex constant in a CONCAT target. */
6900 if (original_target && GET_CODE (original_target) == CONCAT)
6902 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6905 rtarg = XEXP (original_target, 0);
6906 itarg = XEXP (original_target, 1);
6908 /* Move the real and imaginary parts separately. */
6909 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
6910 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
6913 emit_move_insn (rtarg, op0);
6915 emit_move_insn (itarg, op1);
6917 return original_target;
6920 /* ... fall through ... */
6923 temp = output_constant_def (exp, 1);
6925 /* temp contains a constant address.
6926 On RISC machines where a constant address isn't valid,
6927 make some insns to get that address into a register. */
6928 if (modifier != EXPAND_CONST_ADDRESS
6929 && modifier != EXPAND_INITIALIZER
6930 && modifier != EXPAND_SUM
6931 && (! memory_address_p (mode, XEXP (temp, 0))
6932 || flag_force_addr))
6933 return replace_equiv_address (temp,
6934 copy_rtx (XEXP (temp, 0)));
6937 case EXPR_WITH_FILE_LOCATION:
6940 location_t saved_loc = input_location;
6941 input_filename = EXPR_WFL_FILENAME (exp);
6942 input_line = EXPR_WFL_LINENO (exp);
6943 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6944 emit_line_note (input_location);
6945 /* Possibly avoid switching back and forth here. */
6946 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6947 input_location = saved_loc;
6952 context = decl_function_context (exp);
6954 /* If this SAVE_EXPR was at global context, assume we are an
6955 initialization function and move it into our context. */
6957 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6959 /* We treat inline_function_decl as an alias for the current function
6960 because that is the inline function whose vars, types, etc.
6961 are being merged into the current function.
6962 See expand_inline_function. */
6963 if (context == current_function_decl || context == inline_function_decl)
6966 /* If this is non-local, handle it. */
6969 /* The following call just exists to abort if the context is
6970 not of a containing function. */
6971 find_function_data (context);
6973 temp = SAVE_EXPR_RTL (exp);
6974 if (temp && GET_CODE (temp) == REG)
6976 put_var_into_stack (exp, /*rescan=*/true);
6977 temp = SAVE_EXPR_RTL (exp);
6979 if (temp == 0 || GET_CODE (temp) != MEM)
6982 replace_equiv_address (temp,
6983 fix_lexical_addr (XEXP (temp, 0), exp));
6985 if (SAVE_EXPR_RTL (exp) == 0)
6987 if (mode == VOIDmode)
6990 temp = assign_temp (build_qualified_type (type,
6992 | TYPE_QUAL_CONST)),
6995 SAVE_EXPR_RTL (exp) = temp;
6996 if (!optimize && GET_CODE (temp) == REG)
6997 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
7000 /* If the mode of TEMP does not match that of the expression, it
7001 must be a promoted value. We pass store_expr a SUBREG of the
7002 wanted mode but mark it so that we know that it was already
7003 extended. */
7005 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
7007 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
7008 promote_mode (type, mode, &unsignedp, 0);
7009 SUBREG_PROMOTED_VAR_P (temp) = 1;
7010 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
7013 if (temp == const0_rtx)
7014 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7016 store_expr (TREE_OPERAND (exp, 0), temp,
7017 modifier == EXPAND_STACK_PARM ? 2 : 0);
7019 TREE_USED (exp) = 1;
7022 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
7023 must be a promoted value. We return a SUBREG of the wanted mode,
7024 but mark it so that we know that it was already extended. */
7026 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
7027 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
7029 /* Compute the signedness and make the proper SUBREG. */
7030 promote_mode (type, mode, &unsignedp, 0);
7031 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
7032 SUBREG_PROMOTED_VAR_P (temp) = 1;
7033 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
7037 return SAVE_EXPR_RTL (exp);
7042 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7043 TREE_OPERAND (exp, 0)
7044 = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
7048 case PLACEHOLDER_EXPR:
7050 tree old_list = placeholder_list;
7051 tree placeholder_expr = 0;
7053 exp = find_placeholder (exp, &placeholder_expr);
7057 placeholder_list = TREE_CHAIN (placeholder_expr);
7058 temp = expand_expr (exp, original_target, tmode, modifier);
7059 placeholder_list = old_list;
7063 case WITH_RECORD_EXPR:
7064 /* Put the object on the placeholder list, expand our first operand,
7065 and pop the list. */
7066 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
7068 target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
7070 placeholder_list = TREE_CHAIN (placeholder_list);
7074 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
7075 expand_goto (TREE_OPERAND (exp, 0));
7077 expand_computed_goto (TREE_OPERAND (exp, 0));
7081 expand_exit_loop_if_false (NULL,
7082 invert_truthvalue (TREE_OPERAND (exp, 0)));
7085 case LABELED_BLOCK_EXPR:
7086 if (LABELED_BLOCK_BODY (exp))
7087 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
7088 /* Should perhaps use expand_label, but this is simpler and safer. */
7089 do_pending_stack_adjust ();
7090 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
7093 case EXIT_BLOCK_EXPR:
7094 if (EXIT_BLOCK_RETURN (exp))
7095 sorry ("returned value in block_exit_expr");
7096 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
7101 expand_start_loop (1);
7102 expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
7110 tree vars = TREE_OPERAND (exp, 0);
7112 /* Need to open a binding contour here because
7113 if there are any cleanups they must be contained here. */
7114 expand_start_bindings (2);
7116 /* Mark the corresponding BLOCK for output in its proper place. */
7117 if (TREE_OPERAND (exp, 2) != 0
7118 && ! TREE_USED (TREE_OPERAND (exp, 2)))
7119 (*lang_hooks.decls.insert_block) (TREE_OPERAND (exp, 2));
7121 /* If VARS have not yet been expanded, expand them now. */
7124 if (!DECL_RTL_SET_P (vars))
7125 expand_decl (vars);
7126 expand_decl_init (vars);
7127 vars = TREE_CHAIN (vars);
7130 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
7132 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
7138 if (RTL_EXPR_SEQUENCE (exp))
7140 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
7141 abort ();
7142 emit_insn (RTL_EXPR_SEQUENCE (exp));
7143 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
7145 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
7146 free_temps_for_rtl_expr (exp);
7147 return RTL_EXPR_RTL (exp);
7150 /* If we don't need the result, just ensure we evaluate any
7151 subexpressions. */
7156 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
7157 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
7162 /* All elts simple constants => refer to a constant in memory. But
7163 if this is a non-BLKmode mode, let it store a field at a time
7164 since that should make a CONST_INT or CONST_DOUBLE when we
7165 fold. Likewise, if we have a target we can use, it is best to
7166 store directly into the target unless the type is large enough
7167 that memcpy will be used. If we are making an initializer and
7168 all operands are constant, put it in memory as well.
7170 FIXME: Avoid trying to fill vector constructors piece-meal.
7171 Output them with output_constant_def below unless we're sure
7172 they're zeros. This should go away when vector initializers
7173 are treated like VECTOR_CST instead of arrays. */
7175 else if ((TREE_STATIC (exp)
7176 && ((mode == BLKmode
7177 && ! (target != 0 && safe_from_p (target, exp, 1)))
7178 || TREE_ADDRESSABLE (exp)
7179 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7180 && (! MOVE_BY_PIECES_P
7181 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7183 && ((TREE_CODE (type) == VECTOR_TYPE
7184 && !is_zeros_p (exp))
7185 || ! mostly_zeros_p (exp)))))
7186 || ((modifier == EXPAND_INITIALIZER
7187 || modifier == EXPAND_CONST_ADDRESS)
7188 && TREE_CONSTANT (exp)))
7190 rtx constructor = output_constant_def (exp, 1);
7192 if (modifier != EXPAND_CONST_ADDRESS
7193 && modifier != EXPAND_INITIALIZER
7194 && modifier != EXPAND_SUM)
7195 constructor = validize_mem (constructor);
7201 /* Handle calls that pass values in multiple non-contiguous
7202 locations. The Irix 6 ABI has examples of this. */
7203 if (target == 0 || ! safe_from_p (target, exp, 1)
7204 || GET_CODE (target) == PARALLEL
7205 || modifier == EXPAND_STACK_PARM)
7207 = assign_temp (build_qualified_type (type,
7209 | (TREE_READONLY (exp)
7210 * TYPE_QUAL_CONST))),
7211 0, TREE_ADDRESSABLE (exp), 1);
7213 store_constructor (exp, target, 0, int_expr_size (exp));
7219 tree exp1 = TREE_OPERAND (exp, 0);
7221 tree string = string_constant (exp1, &index);
7223 /* Try to optimize reads from const strings. */
7224 if (string
7225 && TREE_CODE (string) == STRING_CST
7226 && TREE_CODE (index) == INTEGER_CST
7227 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
7228 && GET_MODE_CLASS (mode) == MODE_INT
7229 && GET_MODE_SIZE (mode) == 1
7230 && modifier != EXPAND_WRITE)
7231 return gen_int_mode (TREE_STRING_POINTER (string)
7232 [TREE_INT_CST_LOW (index)], mode);
7234 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
7235 op0 = memory_address (mode, op0);
7236 temp = gen_rtx_MEM (mode, op0);
7237 set_mem_attributes (temp, exp, 0);
7239 /* If we are writing to this object and its type is a record with
7240 readonly fields, we must mark it as readonly so it will
7241 conflict with readonly references to those fields. */
7242 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
7243 RTX_UNCHANGING_P (temp) = 1;
7249 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
7253 tree array = TREE_OPERAND (exp, 0);
7254 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
7255 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
7256 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
7259 /* Optimize the special-case of a zero lower bound.
7261 We convert the low_bound to sizetype to avoid some problems
7262 with constant folding. (E.g. suppose the lower bound is 1,
7263 and its mode is QI. Without the conversion, (ARRAY
7264 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
7265 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
7267 if (! integer_zerop (low_bound))
7268 index = size_diffop (index, convert (sizetype, low_bound));
7270 /* Fold an expression like: "foo"[2].
7271 This is not done in fold so it won't happen inside &.
7272 Don't fold if this is for wide characters since it's too
7273 difficult to do correctly and this is a very rare case. */
7275 if (modifier != EXPAND_CONST_ADDRESS
7276 && modifier != EXPAND_INITIALIZER
7277 && modifier != EXPAND_MEMORY
7278 && TREE_CODE (array) == STRING_CST
7279 && TREE_CODE (index) == INTEGER_CST
7280 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
7281 && GET_MODE_CLASS (mode) == MODE_INT
7282 && GET_MODE_SIZE (mode) == 1)
7283 return gen_int_mode (TREE_STRING_POINTER (array)
7284 [TREE_INT_CST_LOW (index)], mode);
7286 /* If this is a constant index into a constant array,
7287 just get the value from the array. Handle both the cases when
7288 we have an explicit constructor and when our operand is a variable
7289 that was declared const. */
7291 if (modifier != EXPAND_CONST_ADDRESS
7292 && modifier != EXPAND_INITIALIZER
7293 && modifier != EXPAND_MEMORY
7294 && TREE_CODE (array) == CONSTRUCTOR
7295 && ! TREE_SIDE_EFFECTS (array)
7296 && TREE_CODE (index) == INTEGER_CST
7297 && 0 > compare_tree_int (index,
7298 list_length (CONSTRUCTOR_ELTS
7299 (TREE_OPERAND (exp, 0)))))
7303 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7304 i = TREE_INT_CST_LOW (index);
7305 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
7309 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
7313 else if (optimize >= 1
7314 && modifier != EXPAND_CONST_ADDRESS
7315 && modifier != EXPAND_INITIALIZER
7316 && modifier != EXPAND_MEMORY
7317 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7318 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7319 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
7321 if (TREE_CODE (index) == INTEGER_CST)
7323 tree init = DECL_INITIAL (array);
7325 if (TREE_CODE (init) == CONSTRUCTOR)
7329 for (elem = CONSTRUCTOR_ELTS (init);
7331 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
7332 elem = TREE_CHAIN (elem))
7335 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
7336 return expand_expr (fold (TREE_VALUE (elem)), target,
7339 else if (TREE_CODE (init) == STRING_CST
7340 && 0 > compare_tree_int (index,
7341 TREE_STRING_LENGTH (init)))
7343 tree type = TREE_TYPE (TREE_TYPE (init));
7344 enum machine_mode mode = TYPE_MODE (type);
7346 if (GET_MODE_CLASS (mode) == MODE_INT
7347 && GET_MODE_SIZE (mode) == 1)
7348 return gen_int_mode (TREE_STRING_POINTER (init)
7349 [TREE_INT_CST_LOW (index)], mode);
7354 goto normal_inner_ref;
7357 /* If the operand is a CONSTRUCTOR, we can just extract the
7358 appropriate field if it is present. */
7359 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
7363 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
7364 elt = TREE_CHAIN (elt))
7365 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
7366 /* We can normally use the value of the field in the
7367 CONSTRUCTOR. However, if this is a bitfield in
7368 an integral mode that we can fit in a HOST_WIDE_INT,
7369 we must mask only the number of bits in the bitfield,
7370 since this is done implicitly by the constructor. If
7371 the bitfield does not meet either of those conditions,
7372 we can't do this optimization. */
7373 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
7374 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
7376 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
7377 <= HOST_BITS_PER_WIDE_INT))))
7379 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
7380 && modifier == EXPAND_STACK_PARM)
7382 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
7383 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
7385 HOST_WIDE_INT bitsize
7386 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
7387 enum machine_mode imode
7388 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
7390 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
7392 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7393 op0 = expand_and (imode, op0, op1, target);
7398 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
7401 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7403 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7411 goto normal_inner_ref;
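/* Concrete numbers for the bitfield extraction above (editorial):
   for an unsigned 3-bit field, OP1 is GEN_INT (7) and a single AND
   suffices; for a signed 3-bit field in SImode, COUNT is 32 - 3 = 29,
   so the value is shifted left 29 bits and then arithmetically right
   29 bits to sign-extend bit 2 into the upper bits.  */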
7414 case ARRAY_RANGE_REF:
7417 enum machine_mode mode1;
7418 HOST_WIDE_INT bitsize, bitpos;
7421 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7422 &mode1, &unsignedp, &volatilep);
7425 /* If we got back the original object, something is wrong. Perhaps
7426 we are evaluating an expression too early. In any event, don't
7427 infinitely recurse. */
7431 /* If TEM's type is a union of variable size, pass TARGET to the inner
7432 computation, since it will need a temporary and TARGET is known
7433 to suffice. This occurs in unchecked conversion in Ada. */
7437 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7438 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7440 && modifier != EXPAND_STACK_PARM
7441 ? target : NULL_RTX),
7443 (modifier == EXPAND_INITIALIZER
7444 || modifier == EXPAND_CONST_ADDRESS
7445 || modifier == EXPAND_STACK_PARM)
7446 ? modifier : EXPAND_NORMAL);
7448 /* If this is a constant, put it into a register if it is a
7449 legitimate constant and OFFSET is 0; otherwise put it into memory. */
7450 if (CONSTANT_P (op0))
7452 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7453 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7455 op0 = force_reg (mode, op0);
7457 op0 = validize_mem (force_const_mem (mode, op0));
7460 /* Otherwise, if this object is not in memory and we either have an
7461 offset or a BLKmode result, put it there. This case can't occur in
7462 C, but can in Ada if we have unchecked conversion of an expression
7463 from a scalar type to an array or record type or for an
7464 ARRAY_RANGE_REF whose type is BLKmode. */
7465 else if (GET_CODE (op0) != MEM
7467 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7469 /* If the operand is a SAVE_EXPR, we can deal with this by
7470 forcing the SAVE_EXPR into memory. */
7471 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7473 put_var_into_stack (TREE_OPERAND (exp, 0),
7475 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7480 = build_qualified_type (TREE_TYPE (tem),
7481 (TYPE_QUALS (TREE_TYPE (tem))
7482 | TYPE_QUAL_CONST));
7483 rtx memloc = assign_temp (nt, 1, 1, 1);
7485 emit_move_insn (memloc, op0);
7492 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7495 if (GET_CODE (op0) != MEM)
7498 #ifdef POINTERS_EXTEND_UNSIGNED
7499 if (GET_MODE (offset_rtx) != Pmode)
7500 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7502 if (GET_MODE (offset_rtx) != ptr_mode)
7503 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7506 /* A constant address in OP0 can have VOIDmode; we must not
7507 call force_reg in that case, so avoid it here. */
7508 if (GET_CODE (op0) == MEM
7509 && GET_MODE (op0) == BLKmode
7510 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7512 && (bitpos % bitsize) == 0
7513 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7514 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7516 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7520 op0 = offset_address (op0, offset_rtx,
7521 highest_pow2_factor (offset));
7524 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7525 record its alignment as BIGGEST_ALIGNMENT. */
7526 if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
7527 && is_aligning_offset (offset, tem))
7528 set_mem_align (op0, BIGGEST_ALIGNMENT);
7530 /* Don't forget about volatility even if this is a bitfield. */
7531 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7533 if (op0 == orig_op0)
7534 op0 = copy_rtx (op0);
7536 MEM_VOLATILE_P (op0) = 1;
7539 /* The following code doesn't handle CONCAT.
7540 Assume only bitpos == 0 can be used for CONCAT, due to
7541 one-element arrays having the same mode as their element. */
7542 if (GET_CODE (op0) == CONCAT)
7544 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
7549 /* In cases where an aligned union has an unaligned object
7550 as a field, we might be extracting a BLKmode value from
7551 an integer-mode (e.g., SImode) object. Handle this case
7552 by doing the extract into an object as wide as the field
7553 (which we know to be the width of a basic mode), then
7554 storing into memory, and changing the mode to BLKmode. */
7555 if (mode1 == VOIDmode
7556 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7557 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7558 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7559 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7560 && modifier != EXPAND_CONST_ADDRESS
7561 && modifier != EXPAND_INITIALIZER)
7562 /* If the field isn't aligned enough to fetch as a memref,
7563 fetch it as a bit field. */
7564 || (mode1 != BLKmode
7565 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7566 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))
7567 && ((modifier == EXPAND_CONST_ADDRESS
7568 || modifier == EXPAND_INITIALIZER)
7570 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7571 || (bitpos % BITS_PER_UNIT != 0)))
7572 /* If the type and the field are a constant size and the
7573 size of the type isn't the same size as the bitfield,
7574 we must use bitfield operations. */
7576 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7578 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7581 enum machine_mode ext_mode = mode;
7583 if (ext_mode == BLKmode
7584 && ! (target != 0 && GET_CODE (op0) == MEM
7585 && GET_CODE (target) == MEM
7586 && bitpos % BITS_PER_UNIT == 0))
7587 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7589 if (ext_mode == BLKmode)
7591 /* In this case, BITPOS must start at a byte boundary and
7592 TARGET, if specified, must be a MEM. */
7593 if (GET_CODE (op0) != MEM
7594 || (target != 0 && GET_CODE (target) != MEM)
7595 || bitpos % BITS_PER_UNIT != 0)
7598 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
7600 target = assign_temp (type, 0, 1, 1);
7602 emit_block_move (target, op0,
7603 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7605 (modifier == EXPAND_STACK_PARM
7606 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7611 op0 = validize_mem (op0);
7613 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7614 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7616 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7617 (modifier == EXPAND_STACK_PARM
7618 ? NULL_RTX : target),
7620 int_size_in_bytes (TREE_TYPE (tem)));
7622 /* If the result is a record type and BITSIZE is narrower than
7623 the mode of OP0, an integral mode, and this is a big endian
7624 machine, we must put the field into the high-order bits. */
7625 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7626 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7627 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7628 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7629 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7633 if (mode == BLKmode)
7635 rtx new = assign_temp (build_qualified_type
7636 ((*lang_hooks.types.type_for_mode)
7638 TYPE_QUAL_CONST), 0, 1, 1);
7640 emit_move_insn (new, op0);
7641 op0 = copy_rtx (new);
7642 PUT_MODE (op0, BLKmode);
7643 set_mem_attributes (op0, exp, 1);
7649 /* If the result is BLKmode, use that to access the object
7650 now as well. */
7651 if (mode == BLKmode)
7654 /* Get a reference to just this component. */
7655 if (modifier == EXPAND_CONST_ADDRESS
7656 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7657 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7659 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7661 if (op0 == orig_op0)
7662 op0 = copy_rtx (op0);
7664 set_mem_attributes (op0, exp, 0);
7665 if (GET_CODE (XEXP (op0, 0)) == REG)
7666 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7668 MEM_VOLATILE_P (op0) |= volatilep;
7669 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7670 || modifier == EXPAND_CONST_ADDRESS
7671 || modifier == EXPAND_INITIALIZER)
7673 else if (target == 0)
7674 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7676 convert_move (target, op0, unsignedp);
7682 rtx insn, before = get_last_insn (), vtbl_ref;
7684 /* Evaluate the interior expression. */
7685 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7688 /* Get or create an instruction off which to hang a note. */
7689 if (REG_P (subtarget))
7692 insn = get_last_insn ();
7695 if (! INSN_P (insn))
7696 insn = prev_nonnote_insn (insn);
7700 target = gen_reg_rtx (GET_MODE (subtarget));
7701 insn = emit_move_insn (target, subtarget);
7704 /* Collect the data for the note. */
7705 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7706 vtbl_ref = plus_constant (vtbl_ref,
7707 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7708 /* Discard the initial CONST that was added. */
7709 vtbl_ref = XEXP (vtbl_ref, 0);
7712 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7717 /* Intended for a reference to a buffer of a file-object in Pascal.
7718 But it's not certain that a special tree code will really be
7719 necessary for these. INDIRECT_REF might work for them. */
7725 /* Pascal set IN expression.
7728 rlo = set_low - (set_low%bits_per_word);
7729 the_word = set [ (index - rlo)/bits_per_word ];
7730 bit_index = index % bits_per_word;
7731 bitmask = 1 << bit_index;
7732 return !!(the_word & bitmask); */
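/* A worked instance of the sketch above (editorial), with
   BITS_PER_UNIT == 8, set_low == 0 and index == 10: diff is 10,
   quo is 1 and rem is 2, so the algorithm selects byte 1 of the set
   and tests bit 2 within that byte.  */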
7734 tree set = TREE_OPERAND (exp, 0);
7735 tree index = TREE_OPERAND (exp, 1);
7736 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7737 tree set_type = TREE_TYPE (set);
7738 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7739 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7740 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7741 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7742 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7743 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7744 rtx setaddr = XEXP (setval, 0);
7745 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7747 rtx diff, quo, rem, addr, bit, result;
7749 /* If domain is empty, answer is no. Likewise if index is constant
7750 and out of bounds. */
7751 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7752 && TREE_CODE (set_low_bound) == INTEGER_CST
7753 && tree_int_cst_lt (set_high_bound, set_low_bound))
7754 || (TREE_CODE (index) == INTEGER_CST
7755 && TREE_CODE (set_low_bound) == INTEGER_CST
7756 && tree_int_cst_lt (index, set_low_bound))
7757 || (TREE_CODE (set_high_bound) == INTEGER_CST
7758 && TREE_CODE (index) == INTEGER_CST
7759 && tree_int_cst_lt (set_high_bound, index))))
7763 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7765 /* If we get here, we have to generate the code for both cases
7766 (in range and out of range). */
7768 op0 = gen_label_rtx ();
7769 op1 = gen_label_rtx ();
7771 if (! (GET_CODE (index_val) == CONST_INT
7772 && GET_CODE (lo_r) == CONST_INT))
7773 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7774 GET_MODE (index_val), iunsignedp, op1);
7776 if (! (GET_CODE (index_val) == CONST_INT
7777 && GET_CODE (hi_r) == CONST_INT))
7778 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7779 GET_MODE (index_val), iunsignedp, op1);
7781 /* Calculate the element number of bit zero in the first word
7782 of the set. */
7783 if (GET_CODE (lo_r) == CONST_INT)
7784 rlow = GEN_INT (INTVAL (lo_r)
7785 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7787 rlow = expand_binop (index_mode, and_optab, lo_r,
7788 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7789 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7791 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7792 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7794 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7795 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7796 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7797 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7799 addr = memory_address (byte_mode,
7800 expand_binop (index_mode, add_optab, diff,
7801 setaddr, NULL_RTX, iunsignedp,
7804 /* Extract the bit we want to examine. */
7805 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7806 gen_rtx_MEM (byte_mode, addr),
7807 make_tree (TREE_TYPE (index), rem),
7809 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7810 GET_MODE (target) == byte_mode ? target : 0,
7811 1, OPTAB_LIB_WIDEN);
7813 if (result != target)
7814 convert_move (target, result, 1);
7816 /* Output the code to handle the out-of-range case. */
7819 emit_move_insn (target, const0_rtx);
7824 case WITH_CLEANUP_EXPR:
7825 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7827 WITH_CLEANUP_EXPR_RTL (exp)
7828 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7829 expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
7830 CLEANUP_EH_ONLY (exp));
7832 /* That's it for this cleanup. */
7833 TREE_OPERAND (exp, 1) = 0;
7835 return WITH_CLEANUP_EXPR_RTL (exp);
7837 case CLEANUP_POINT_EXPR:
7839 /* Start a new binding layer that will keep track of all cleanup
7840 actions to be performed. */
7841 expand_start_bindings (2);
7843 target_temp_slot_level = temp_slot_level;
7845 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7846 /* If we're going to use this value, load it up now. */
7847 if (! ignore)
7848 op0 = force_not_mem (op0);
7849 preserve_temp_slots (op0);
7850 expand_end_bindings (NULL_TREE, 0, 0);
7855 /* Check for a built-in function. */
7856 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7857 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7859 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7861 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7862 == BUILT_IN_FRONTEND)
7863 return (*lang_hooks.expand_expr) (exp, original_target,
7866 return expand_builtin (exp, target, subtarget, tmode, ignore);
7869 return expand_call (exp, target, ignore);
7871 case NON_LVALUE_EXPR:
7874 case REFERENCE_EXPR:
7875 if (TREE_OPERAND (exp, 0) == error_mark_node)
7878 if (TREE_CODE (type) == UNION_TYPE)
7880 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7882 /* If both input and output are BLKmode, this conversion isn't doing
7883 anything except possibly changing memory attributes. */
7884 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7886 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7889 result = copy_rtx (result);
7890 set_mem_attributes (result, exp, 0);
7895 target = assign_temp (type, 0, 1, 1);
7897 if (GET_CODE (target) == MEM)
7898 /* Store data into beginning of memory target. */
7899 store_expr (TREE_OPERAND (exp, 0),
7900 adjust_address (target, TYPE_MODE (valtype), 0),
7901 modifier == EXPAND_STACK_PARM ? 2 : 0);
7903 else if (GET_CODE (target) == REG)
7904 /* Store this field into a union of the proper type. */
7905 store_field (target,
7906 MIN ((int_size_in_bytes (TREE_TYPE
7907 (TREE_OPERAND (exp, 0)))
7909 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7910 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7911 VOIDmode, 0, type, 0);
7915 /* Return the entire union. */
7919 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7921 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7924 /* If the signedness of the conversion differs and OP0 is
7925 a promoted SUBREG, clear that indication since we now
7926 have to do the proper extension. */
7927 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7928 && GET_CODE (op0) == SUBREG)
7929 SUBREG_PROMOTED_VAR_P (op0) = 0;
7934 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7935 if (GET_MODE (op0) == mode)
7936 return op0;
7938 /* If OP0 is a constant, just convert it into the proper mode. */
7939 if (CONSTANT_P (op0))
7941 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7942 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7944 if (modifier == EXPAND_INITIALIZER)
7945 return simplify_gen_subreg (mode, op0, inner_mode,
7946 subreg_lowpart_offset (mode,
7949 return convert_modes (mode, inner_mode, op0,
7950 TREE_UNSIGNED (inner_type));
7953 if (modifier == EXPAND_INITIALIZER)
7954 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7958 convert_to_mode (mode, op0,
7959 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7961 convert_move (target, op0,
7962 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7965 case VIEW_CONVERT_EXPR:
7966 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7968 /* If the input and output modes are both the same, we are done.
7969 Otherwise, if neither mode is BLKmode and both are integral and within
7970 a word, we can use gen_lowpart. If neither is true, make sure the
7971 operand is in memory and convert the MEM to the new mode. */
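/* E.g. (editorial): bit-casting a float to an int (SFmode to SImode)
   cannot use gen_lowpart here, since SFmode is not MODE_INT; the
   operand takes the memory path below and is re-read as SImode.
   Two integral views of the same small aggregate, by contrast, go
   through gen_lowpart directly.  */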
7972 if (TYPE_MODE (type) == GET_MODE (op0))
7974 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7975 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7976 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
7977 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7978 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7979 op0 = gen_lowpart (TYPE_MODE (type), op0);
7980 else if (GET_CODE (op0) != MEM)
7982 /* If the operand is not a MEM, force it into memory. Since we
7983 are going to be changing the mode of the MEM, don't call
7984 force_const_mem for constants because we don't allow pool
7985 constants to change mode. */
7986 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7988 if (TREE_ADDRESSABLE (exp))
7991 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7993 = assign_stack_temp_for_type
7994 (TYPE_MODE (inner_type),
7995 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7997 emit_move_insn (target, op0);
8001 /* At this point, OP0 is in the correct mode. If the output type is such
8002 that the operand is known to be aligned, indicate that it is.
8003 Otherwise, we need only be concerned about alignment for non-BLKmode
8004 results. */
8005 if (GET_CODE (op0) == MEM)
8007 op0 = copy_rtx (op0);
8009 if (TYPE_ALIGN_OK (type))
8010 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
8011 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
8012 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
8014 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8015 HOST_WIDE_INT temp_size
8016 = MAX (int_size_in_bytes (inner_type),
8017 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
8018 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
8019 temp_size, 0, type);
8020 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
8022 if (TREE_ADDRESSABLE (exp))
8025 if (GET_MODE (op0) == BLKmode)
8026 emit_block_move (new_with_op0_mode, op0,
8027 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
8028 (modifier == EXPAND_STACK_PARM
8029 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
8031 emit_move_insn (new_with_op0_mode, op0);
8036 op0 = adjust_address (op0, TYPE_MODE (type), 0);
8042 this_optab = ! unsignedp && flag_trapv
8043 && (GET_MODE_CLASS (mode) == MODE_INT)
8044 ? addv_optab : add_optab;
8046 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
8047 something else, make sure we add the register to the constant and
8048 then to the other thing. This case can occur during strength
8049 reduction and doing it this way will produce better code if the
8050 frame pointer or argument pointer is eliminated.
8052 fold-const.c will ensure that the constant is always in the inner
8053 PLUS_EXPR, so the only case we need to do anything about is if
8054 sp, ap, or fp is our second argument, in which case we must swap
8055 the innermost first argument and our second argument. */
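/* I.e. (editorial): an expression of the shape (x + 4) + fp is
   rearranged into (fp + 4) + x, so that the eliminable register
   stays next to the constant it will eventually be folded with.  */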
8057 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
8058 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
8059 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
8060 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
8061 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
8062 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
8064 tree t = TREE_OPERAND (exp, 1);
8066 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
8067 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
8070 /* If the result is to be ptr_mode and we are adding an integer to
8071 something, we might be forming a constant. So try to use
8072 plus_constant. If it produces a sum and we can't accept it,
8073 use force_operand. This allows P = &ARR[const] to generate
8074 efficient code on machines where a SYMBOL_REF is not a valid
8075 address.
8077 If this is an EXPAND_SUM call, always return the sum. */
8078 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8079 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8081 if (modifier == EXPAND_STACK_PARM)
8083 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
8084 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8085 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
8089 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
8091 /* Use immed_double_const to ensure that the constant is
8092 truncated according to the mode of OP1, then sign extended
8093 to a HOST_WIDE_INT. Using the constant directly can result
8094 in non-canonical RTL in a 64x32 cross compile. */
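/* E.g. (editorial): when a 64-bit host targets a 32-bit machine,
   the SImode constant 0xffffffff must become (const_int -1);
   using TREE_INT_CST_LOW directly would leave the non-canonical
   (const_int 0xffffffff).  */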
8096 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
8098 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
8099 op1 = plus_constant (op1, INTVAL (constant_part));
8100 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8101 op1 = force_operand (op1, target);
8102 return op1;
8105 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8106 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
8107 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
8111 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8112 (modifier == EXPAND_INITIALIZER
8113 ? EXPAND_INITIALIZER : EXPAND_SUM));
8114 if (! CONSTANT_P (op0))
8116 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8117 VOIDmode, modifier);
8118 /* Don't go to both_summands if modifier
8119 says it's not right to return a PLUS. */
8120 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8121 goto binop2;
8122 goto both_summands;
8124 /* Use immed_double_const to ensure that the constant is
8125 truncated according to the mode of OP1, then sign extended
8126 to a HOST_WIDE_INT. Using the constant directly can result
8127 in non-canonical RTL in a 64x32 cross compile. */
8129 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
8131 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
8132 op0 = plus_constant (op0, INTVAL (constant_part));
8133 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8134 op0 = force_operand (op0, target);
8135 return op0;
8139 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8140 subtarget = 0;
8142 /* No sense saving up arithmetic to be done
8143 if it's all in the wrong mode to form part of an address.
8144 And force_operand won't know whether to sign-extend or
8145 zero-extend. */
8146 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8147 || mode != ptr_mode)
8149 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8150 if (! operand_equal_p (TREE_OPERAND (exp, 0),
8151 TREE_OPERAND (exp, 1), 0))
8152 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8155 if (op0 == const0_rtx)
8156 return op1;
8157 if (op1 == const0_rtx)
8158 return op0;
8162 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
8163 if (! operand_equal_p (TREE_OPERAND (exp, 0),
8164 TREE_OPERAND (exp, 1), 0))
8165 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8166 VOIDmode, modifier);
8170 /* We come here from MINUS_EXPR when the second operand is a
8171 constant. */
8172 both_summands:
8173 /* Make sure any term that's a sum with a constant comes last. */
8174 if (GET_CODE (op0) == PLUS
8175 && CONSTANT_P (XEXP (op0, 1)))
8181 /* If adding to a sum including a constant,
8182 associate it to put the constant outside. */
8183 if (GET_CODE (op1) == PLUS
8184 && CONSTANT_P (XEXP (op1, 1)))
8186 rtx constant_term = const0_rtx;
8188 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
8191 /* Ensure that MULT comes first if there is one. */
8192 else if (GET_CODE (op0) == MULT)
8193 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
8195 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
8197 /* Let's also eliminate constants from op0 if possible. */
8198 op0 = eliminate_constant_term (op0, &constant_term);
8200 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
8201 their sum should be a constant. Form it into OP1, since the
8202 result we want will then be OP0 + OP1. */
8204 temp = simplify_binary_operation (PLUS, mode, constant_term,
8209 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
8212 /* Put a constant term last and put a multiplication first. */
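      /* For illustration (not in the original): the swap below turns
	 (plus (const_int 4) (reg)) into (plus (reg) (const_int 4)) and
	 moves a MULT term first, matching the canonical operand order
	 that the address recognizers expect.  */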
      if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
	temp = op1, op1 = op0, op0 = temp;

      temp = simplify_binary_operation (PLUS, mode, op0, op1);
      return temp ? temp : gen_rtx_PLUS (mode, op0, op1);

    case MINUS_EXPR:
      /* For initializers, we are allowed to return a MINUS of two
	 symbolic constants.  Here we handle all cases when both operands
	 are constant.  */
      /* Handle difference of two symbolic constants,
	 for the sake of an initializer.  */
      if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	  && really_constant_p (TREE_OPERAND (exp, 0))
	  && really_constant_p (TREE_OPERAND (exp, 1)))
	{
	  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
				 modifier);
	  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
				 modifier);

	  /* If the last operand is a CONST_INT, use plus_constant of
	     the negated constant.  Else make the MINUS.  */
	  if (GET_CODE (op1) == CONST_INT)
	    return plus_constant (op0, - INTVAL (op1));
	  else
	    return gen_rtx_MINUS (mode, op0, op1);
	}
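      /* Illustration (not from the original source): in a static
	 initializer, the difference of two symbolic addresses may be
	 returned as (minus:SI (symbol_ref ("b")) (symbol_ref ("a")))
	 and left for the assembler to resolve; outside an initializer
	 the subtraction must be computed with actual insns.  */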
      this_optab = ! unsignedp && flag_trapv
		   && (GET_MODE_CLASS (mode) == MODE_INT)
		   ? subv_optab : sub_optab;

      /* No sense saving up arithmetic to be done
	 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
	  || mode != ptr_mode)
	goto binop;

      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;

      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);

      /* Convert A - const to A + (-const).  */
      if (GET_CODE (op1) == CONST_INT)
	{
	  op1 = negate_rtx (mode, op1);
	  goto both_summands;
	}

      goto binop2;

    case MULT_EXPR:
      /* If first operand is constant, swap them.
	 Thus the following special case checks need only
	 check the second operand.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
	{
	  tree t1 = TREE_OPERAND (exp, 0);
	  TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
	  TREE_OPERAND (exp, 1) = t1;
	}

      /* Attempt to return something suitable for generating an
	 indexed address, for machines that support that.  */

      if (modifier == EXPAND_SUM && mode == ptr_mode
	  && host_integerp (TREE_OPERAND (exp, 1), 0))
	{
	  tree exp1 = TREE_OPERAND (exp, 1);

	  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
			     EXPAND_SUM);

	  /* If we knew for certain that this is arithmetic for an array
	     reference, and we knew the bounds of the array, then we could
	     apply the distributive law across (PLUS X C) for constant C.
	     Without such knowledge, we risk overflowing the computation
	     when both X and C are large, but X+C isn't.  */
	  /* ??? Could perhaps special-case EXP being unsigned and C being
	     positive.  In that case we are certain that X+C is no smaller
	     than X and so the transformed expression will overflow iff the
	     original would have.  */

	  if (GET_CODE (op0) != REG)
	    op0 = force_operand (op0, NULL_RTX);
	  if (GET_CODE (op0) != REG)
	    op0 = copy_to_mode_reg (mode, op0);

	  return gen_rtx_MULT (mode, op0,
			       gen_int_mode (tree_low_cst (exp1, 0),
					     TYPE_MODE (TREE_TYPE (exp1))));
	}

      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;
      if (modifier == EXPAND_STACK_PARM)
	target = 0;

      /* Check for multiplying things that have been extended
	 from a narrower type.  If this machine supports multiplying
	 in that narrower type with a result in the desired type,
	 do it that way, and avoid the explicit type-conversion.  */
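      /* Example of the case handled below (illustrative only): with
	 32-bit int and 64-bit long long, "(long long) a * (long long) b"
	 can use a single widening multiply pattern (e.g. mulsidi3) on
	 the narrow operands instead of widening both operands and doing
	 a full 64x64 multiply.  */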
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
	  && TREE_CODE (type) == INTEGER_TYPE
	  && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
	  && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
	       && int_fits_type_p (TREE_OPERAND (exp, 1),
				   TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	       /* Don't use a widening multiply if a shift will do.  */
	       && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
		    > HOST_BITS_PER_WIDE_INT)
		   || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
	      ||
	      (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
	       && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
		   ==
		   TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
	       /* If both operands are extended, they must either both
		  be zero-extended or both be sign-extended.  */
	       && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
		   ==
		   TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
	{
	  enum machine_mode innermode
	    = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
	  optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
			       ? smul_widen_optab : umul_widen_optab);
	  this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
			? umul_widen_optab : smul_widen_optab);
	  if (mode == GET_MODE_WIDER_MODE (innermode))
	    {
	      if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
		{
		  op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     NULL_RTX, VOIDmode, 0);
		  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
		    op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
				       VOIDmode, 0);
		  else
		    op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
				       NULL_RTX, VOIDmode, 0);
		  goto binop2;
		}
	      else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
		       && innermode == word_mode)
		{
		  rtx htem;

		  op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     NULL_RTX, VOIDmode, 0);
		  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
		    op1 = convert_modes (innermode, mode,
					 expand_expr (TREE_OPERAND (exp, 1),
						      NULL_RTX, VOIDmode, 0),
					 unsignedp);
		  else
		    op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
				       NULL_RTX, VOIDmode, 0);
		  temp = expand_binop (mode, other_optab, op0, op1, target,
				       unsignedp, OPTAB_LIB_WIDEN);
		  htem = expand_mult_highpart_adjust (innermode,
						      gen_highpart (innermode, temp),
						      op0, op1,
						      gen_highpart (innermode, temp),
						      unsignedp);
		  emit_move_insn (gen_highpart (innermode, temp), htem);
		  return temp;
		}
	    }
	}

      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      if (! operand_equal_p (TREE_OPERAND (exp, 0),
			     TREE_OPERAND (exp, 1), 0))
	op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      else
	op1 = op0;
      return expand_mult (mode, op0, op1, target, unsignedp);

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      /* Possible optimization: compute the dividend with EXPAND_SUM
	 then if the divisor is constant can optimize the case
	 where some terms of the dividend have coeffs divisible by it.  */
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_divmod (0, code, mode, op0, op1, target, unsignedp);

    case RDIV_EXPR:
      /* Emit a/b as a*(1/b).  Later we may be able to CSE the reciprocal,
	 saving an expensive divide.  If not, combine will rebuild the
	 original computation.  */
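      /* Illustration (not in the original): under
	 -funsafe-math-optimizations, "x / y" becomes "x * (1.0 / y)",
	 so a sequence of divisions by the same Y can CSE one reciprocal
	 and replace each divide with a cheaper multiply.  */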
      if (flag_unsafe_math_optimizations && optimize && !optimize_size
	  && TREE_CODE (type) == REAL_TYPE
	  && !real_onep (TREE_OPERAND (exp, 0)))
	return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
				   build (RDIV_EXPR, type,
					  build_real (type, dconst1),
					  TREE_OPERAND (exp, 1))),
			    target, tmode, modifier);
      this_optab = sdiv_optab;
      goto binop;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_divmod (1, code, mode, op0, op1, target, unsignedp);

    case FIX_ROUND_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_CEIL_EXPR:
      abort ();			/* Not used for C.  */

    case FIX_TRUNC_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      if (target == 0 || modifier == EXPAND_STACK_PARM)
	target = gen_reg_rtx (mode);
      expand_fix (target, op0, unsignedp);
      return target;

    case FLOAT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      if (target == 0 || modifier == EXPAND_STACK_PARM)
	target = gen_reg_rtx (mode);
      /* expand_float can't figure out what to do if FROM has VOIDmode.
	 So give it the correct mode.  With -O, cse will optimize this.  */
      if (GET_MODE (op0) == VOIDmode)
	op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
				op0);
      expand_float (target, op0,
		    TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;

    case NEGATE_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      temp = expand_unop (mode,
			  ! unsignedp && flag_trapv
			  && (GET_MODE_CLASS (mode) == MODE_INT)
			  ? negv_optab : neg_optab, op0, target, 0);
      if (temp == 0)
	abort ();
      return temp;

    case ABS_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      if (modifier == EXPAND_STACK_PARM)
	target = 0;

      /* Handle complex values specially.  */
      if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
	return expand_complex_abs (mode, op0, target, unsignedp);

      /* Unsigned abs is simply the operand.  Testing here means we don't
	 risk generating incorrect code below.  */
      if (TREE_UNSIGNED (type))
	return op0;

      return expand_abs (mode, op0, target, unsignedp,
			 safe_from_p (target, TREE_OPERAND (exp, 0), 1));

    case MAX_EXPR:
    case MIN_EXPR:
      target = original_target;
      if (target == 0
	  || modifier == EXPAND_STACK_PARM
	  || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
	  || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
	  || GET_MODE (target) != mode
	  || (GET_CODE (target) == REG
	      && REGNO (target) < FIRST_PSEUDO_REGISTER))
	target = gen_reg_rtx (mode);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);

      /* First try to do it with a special MIN or MAX instruction.
	 If that does not win, use a conditional jump to select the proper
	 value.  */
      this_optab = (TREE_UNSIGNED (type)
		    ? (code == MIN_EXPR ? umin_optab : umax_optab)
		    : (code == MIN_EXPR ? smin_optab : smax_optab));

      temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
			   OPTAB_WIDEN);
      if (temp != 0)
	return temp;

      /* At this point, a MEM target is no longer useful; we will get better
	 code without it.  */
      if (GET_CODE (target) == MEM)
	target = gen_reg_rtx (mode);

      if (target != op0)
	emit_move_insn (target, op0);

      op0 = gen_label_rtx ();

      /* If this mode is an integer too wide to compare properly,
	 compare word by word.  Rely on cse to optimize constant cases.  */
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && ! can_compare_p (GE, mode, ccp_jump))
	{
	  if (code == MAX_EXPR)
	    do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
					  target, op1, NULL_RTX, op0);
	  else
	    do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
					  op1, target, NULL_RTX, op0);
	}
      else
	{
	  int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
	  do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
				   unsignedp, mode, NULL_RTX, NULL_RTX,
				   op0);
	}
      emit_move_insn (target, op1);
      emit_label (op0);
      return target;

    case BIT_NOT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
      if (temp == 0)
	abort ();
      return temp;

    case FFS_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      temp = expand_unop (mode, ffs_optab, op0, target, 1);
      if (temp == 0)
	abort ();
      return temp;

    case CLZ_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, clz_optab, op0, target, 1);
      if (temp == 0)
	abort ();
      return temp;

    case CTZ_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, ctz_optab, op0, target, 1);
      if (temp == 0)
	abort ();
      return temp;

    case POPCOUNT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, popcount_optab, op0, target, 1);
      if (temp == 0)
	abort ();
      return temp;

    case PARITY_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, parity_optab, op0, target, 1);
      if (temp == 0)
	abort ();
      return temp;

      /* ??? Can optimize bitwise operations with one arg constant.
	 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
	 and (a bitwise1 b) bitwise2 b (etc)
	 but that is probably not worth while.  */

      /* BIT_AND_EXPR is for bitwise anding.  TRUTH_AND_EXPR is for anding two
	 boolean values when we want in all cases to compute both of them.  In
	 general it is fastest to do TRUTH_AND_EXPR by computing both operands
	 as actual zero-or-1 values and then bitwise anding.  In cases where
	 there cannot be any side effects, better code would be made by
	 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
	 how to recognize those cases.  */
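      /* Illustration (not from the original source): for side-effect-free
	 "r = a && b", both operands are reduced to 0-or-1 values and
	 combined with a bitwise AND here, avoiding the branches that the
	 short-circuit TRUTH_ANDIF_EXPR form would emit.  */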
    case TRUTH_AND_EXPR:
    case BIT_AND_EXPR:
      this_optab = and_optab;
      goto binop;

    case TRUTH_OR_EXPR:
    case BIT_IOR_EXPR:
      this_optab = ior_optab;
      goto binop;

    case TRUTH_XOR_EXPR:
    case BIT_XOR_EXPR:
      this_optab = xor_optab;
      goto binop;

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
			   unsignedp);

      /* Could determine the answer when only additive constants differ.  Also,
	 the addition of one can be handled by changing the condition.  */
    case EQ_EXPR:
    case NE_EXPR:
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
      temp = do_store_flag (exp,
			    modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
			    tmode != VOIDmode ? tmode : mode, 0);
      if (temp != 0)
	return temp;

      /* For foo != 0, load foo, and if it is nonzero load 1 instead.  */
      if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
	  && original_target
	  && GET_CODE (original_target) == REG
	  && (GET_MODE (original_target)
	      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	{
	  temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
			      VOIDmode, 0);

	  /* If temp is constant, we can just compute the result.  */
	  if (GET_CODE (temp) == CONST_INT)
	    {
	      if (INTVAL (temp) != 0)
		emit_move_insn (target, const1_rtx);
	      else
		emit_move_insn (target, const0_rtx);

	      return target;
	    }

	  if (temp != original_target)
	    {
	      enum machine_mode mode1 = GET_MODE (temp);
	      if (mode1 == VOIDmode)
		mode1 = tmode != VOIDmode ? tmode : mode;

	      temp = copy_to_mode_reg (mode1, temp);
	    }

	  op1 = gen_label_rtx ();
	  emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
				   GET_MODE (temp), unsignedp, op1);
	  emit_move_insn (temp, const1_rtx);
	  emit_label (op1);
	  return temp;
	}

      /* If no set-flag instruction, must generate a conditional
	 store into a temporary variable.  Drop through
	 and handle this like && and ||.  */

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      if (! ignore
	  && (target == 0
	      || modifier == EXPAND_STACK_PARM
	      || ! safe_from_p (target, exp, 1)
	      /* Make sure we don't have a hard reg (such as function's return
		 value) live across basic blocks, if not optimizing.  */
	      || (!optimize && GET_CODE (target) == REG
		  && REGNO (target) < FIRST_PSEUDO_REGISTER)))
	target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

      if (target)
	emit_clr_insn (target);

      op1 = gen_label_rtx ();
      jumpifnot (exp, op1);

      if (target)
	emit_0_to_1_insn (target);

      emit_label (op1);
      return ignore ? const0_rtx : target;

    case TRUTH_NOT_EXPR:
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
      /* The parser is careful to generate TRUTH_NOT_EXPR
	 only with operands that are always zero or one.  */
      temp = expand_binop (mode, xor_optab, op0, const1_rtx,
			   target, 1, OPTAB_LIB_WIDEN);
      if (temp == 0)
	abort ();
      return temp;

    case COMPOUND_EXPR:
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      emit_queue ();
      return expand_expr (TREE_OPERAND (exp, 1),
			  (ignore ? const0_rtx : target),
			  VOIDmode, modifier);

    case COND_EXPR:
      /* If we would have a "singleton" (see below) were it not for a
	 conversion in each arm, bring that conversion back out.  */
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
	  && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
	  && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
	      == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
	{
	  tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
	  tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);

	  if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
	       && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
	      || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
		  && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
	      || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
		  && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
	      || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
		  && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
	    return expand_expr (build1 (NOP_EXPR, type,
					build (COND_EXPR, TREE_TYPE (iftrue),
					       TREE_OPERAND (exp, 0),
					       iftrue, iffalse)),
				target, tmode, modifier);
	}

      {
	/* Note that COND_EXPRs whose type is a structure or union
	   are required to be constructed to contain assignments of
	   a temporary variable, so that we can evaluate them here
	   for side effect only.  If type is void, we must do likewise.  */

	/* If an arm of the branch requires a cleanup,
	   only that cleanup is performed.  */

	tree singleton = 0;
	tree binary_op = 0, unary_op = 0;

	/* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
	   convert it to our mode, if necessary.  */
	if (integer_onep (TREE_OPERAND (exp, 1))
	    && integer_zerop (TREE_OPERAND (exp, 2))
	    && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
	  {
	    if (ignore)
	      {
		expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
			     modifier);
		return const0_rtx;
	      }

	    if (modifier == EXPAND_STACK_PARM)
	      target = 0;
	    op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
	    if (GET_MODE (op0) == mode)
	      return op0;

	    if (target == 0)
	      target = gen_reg_rtx (mode);
	    convert_move (target, op0, unsignedp);
	    return target;
	  }

	/* Check for X ? A + B : A.  If we have this, we can copy A to the
	   output and conditionally add B.  Similarly for unary operations.
	   Don't do this if X has side-effects because those side effects
	   might affect A or B and the "?" operation is a sequence point in
	   ANSI.  (operand_equal_p tests for side effects.)  */
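	/* Example of the singleton cases (illustrative only): in
	   "r = x ? y + 4 : y", operand 2 (Y) equals the first operand of
	   the binary op in operand 1, so Y can be copied to the result
	   unconditionally and 4 added only when X is true.  */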
	if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
	    && operand_equal_p (TREE_OPERAND (exp, 2),
				TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
	  singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
		 && operand_equal_p (TREE_OPERAND (exp, 1),
				     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
	  singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
		 && operand_equal_p (TREE_OPERAND (exp, 2),
				     TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
	  singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
		 && operand_equal_p (TREE_OPERAND (exp, 1),
				     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
	  singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);

	/* If we are not to produce a result, we have no target.  Otherwise,
	   if a target was specified use it; it will not be used as an
	   intermediate target unless it is safe.  If no target, use a
	   temporary.  */

	if (ignore)
	  temp = 0;
	else if (modifier == EXPAND_STACK_PARM)
	  temp = assign_temp (type, 0, 0, 1);
	else if (original_target
		 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
		     || (singleton && GET_CODE (original_target) == REG
			 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
			 && original_target == var_rtx (singleton)))
		 && GET_MODE (original_target) == mode
#ifdef HAVE_conditional_move
		 && (! can_conditionally_move_p (mode)
		     || GET_CODE (original_target) == REG
		     || TREE_ADDRESSABLE (type))
#endif
		 && (GET_CODE (original_target) != MEM
		     || TREE_ADDRESSABLE (type)))
	  temp = original_target;
	else if (TREE_ADDRESSABLE (type))
	  abort ();
	else
	  temp = assign_temp (type, 0, 0, 1);

	/* If we had X ? A + C : A, with C a constant power of 2, and we can
	   do the test of X as a store-flag operation, do this as
	   A + ((X != 0) << log C).  Similarly for other simple binary
	   operators.  Only do for C == 1 if BRANCH_COST is low.  */
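	/* Worked instance (not in the original): with C == 4,
	   "r = x ? a + 4 : a" becomes r = a + ((x != 0) << 2) -- one
	   store-flag, one shift, and one add, with no branch.  */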
	if (temp && singleton && binary_op
	    && (TREE_CODE (binary_op) == PLUS_EXPR
		|| TREE_CODE (binary_op) == MINUS_EXPR
		|| TREE_CODE (binary_op) == BIT_IOR_EXPR
		|| TREE_CODE (binary_op) == BIT_XOR_EXPR)
	    && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
		: integer_onep (TREE_OPERAND (binary_op, 1)))
	    && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
	  {
	    rtx result;
	    tree cond;
	    optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
			    ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
			       ? addv_optab : add_optab)
			    : TREE_CODE (binary_op) == MINUS_EXPR
			    ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
			       ? subv_optab : sub_optab)
			    : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
			    : xor_optab);

	    /* If we had X ? A : A + 1, do this as A + (X == 0).  */
	    if (singleton == TREE_OPERAND (exp, 1))
	      cond = invert_truthvalue (TREE_OPERAND (exp, 0));
	    else
	      cond = TREE_OPERAND (exp, 0);

	    result = do_store_flag (cond, (safe_from_p (temp, singleton, 1)
					   ? temp : NULL_RTX),
				    mode, BRANCH_COST <= 1);

	    if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
	      result = expand_shift (LSHIFT_EXPR, mode, result,
				     build_int_2 (tree_log2
						  (TREE_OPERAND (binary_op, 1)),
						  0),
				     (safe_from_p (temp, singleton, 1)
				      ? temp : NULL_RTX), 0);

	    if (result)
	      {
		op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
		return expand_binop (mode, boptab, op1, result, temp,
				     unsignedp, OPTAB_LIB_WIDEN);
	      }
	  }

	do_pending_stack_adjust ();
	NO_DEFER_POP;
	op0 = gen_label_rtx ();

	if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
	  {
	    if (temp != 0)
	      {
		/* If the target conflicts with the other operand of the
		   binary op, we can't use it.  Also, we can't use the target
		   if it is a hard register, because evaluating the condition
		   might clobber it.  */
		if ((binary_op
		     && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
		    || (GET_CODE (temp) == REG
			&& REGNO (temp) < FIRST_PSEUDO_REGISTER))
		  temp = gen_reg_rtx (mode);
		store_expr (singleton, temp,
			    modifier == EXPAND_STACK_PARM ? 2 : 0);
	      }
	    else
	      expand_expr (singleton,
			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	    if (singleton == TREE_OPERAND (exp, 1))
	      jumpif (TREE_OPERAND (exp, 0), op0);
	    else
	      jumpifnot (TREE_OPERAND (exp, 0), op0);

	    start_cleanup_deferral ();
	    if (binary_op && temp == 0)
	      /* Just touch the other operand.  */
	      expand_expr (TREE_OPERAND (binary_op, 1),
			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	    else if (binary_op)
	      store_expr (build (TREE_CODE (binary_op), type,
				 make_tree (type, temp),
				 TREE_OPERAND (binary_op, 1)),
			  temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
	    else
	      store_expr (build1 (TREE_CODE (unary_op), type,
				  make_tree (type, temp)),
			  temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
	    op1 = op0;
	  }
	/* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
	   comparison operator.  If we have one of these cases, set the
	   output to A, branch on A (cse will merge these two references),
	   then set the output to FOO.  */
	else if (temp
		 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
		 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
		 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     TREE_OPERAND (exp, 1), 0)
		 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
		     || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
		 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
	  {
	    if (GET_CODE (temp) == REG
		&& REGNO (temp) < FIRST_PSEUDO_REGISTER)
	      temp = gen_reg_rtx (mode);
	    store_expr (TREE_OPERAND (exp, 1), temp,
			modifier == EXPAND_STACK_PARM ? 2 : 0);
	    jumpif (TREE_OPERAND (exp, 0), op0);

	    start_cleanup_deferral ();
	    store_expr (TREE_OPERAND (exp, 2), temp,
			modifier == EXPAND_STACK_PARM ? 2 : 0);
	    op1 = op0;
	  }
	else if (temp
		 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
		 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
		 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     TREE_OPERAND (exp, 2), 0)
		 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
		     || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
		 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
	  {
	    if (GET_CODE (temp) == REG
		&& REGNO (temp) < FIRST_PSEUDO_REGISTER)
	      temp = gen_reg_rtx (mode);
	    store_expr (TREE_OPERAND (exp, 2), temp,
			modifier == EXPAND_STACK_PARM ? 2 : 0);
	    jumpifnot (TREE_OPERAND (exp, 0), op0);

	    start_cleanup_deferral ();
	    store_expr (TREE_OPERAND (exp, 1), temp,
			modifier == EXPAND_STACK_PARM ? 2 : 0);
	    op1 = op0;
	  }
	else
	  {
	    op1 = gen_label_rtx ();
	    jumpifnot (TREE_OPERAND (exp, 0), op0);

	    start_cleanup_deferral ();

	    /* One branch of the cond can be void, if it never returns.  For
	       example A ? throw : E  */
	    if (temp != 0
		&& TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
	      store_expr (TREE_OPERAND (exp, 1), temp,
			  modifier == EXPAND_STACK_PARM ? 2 : 0);
	    else
	      expand_expr (TREE_OPERAND (exp, 1),
			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	    end_cleanup_deferral ();
	    emit_queue ();
	    emit_jump_insn (gen_jump (op1));
	    emit_barrier ();
	    emit_label (op0);
	    start_cleanup_deferral ();
	    if (temp != 0
		&& TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
	      store_expr (TREE_OPERAND (exp, 2), temp,
			  modifier == EXPAND_STACK_PARM ? 2 : 0);
	    else
	      expand_expr (TREE_OPERAND (exp, 2),
			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);

	    end_cleanup_deferral ();
	  }

	emit_queue ();
	emit_label (op1);
	OK_DEFER_POP;

	return temp;
      }

    case TARGET_EXPR:
      {
	/* Something needs to be initialized, but we didn't know
	   where that thing was when building the tree.  For example,
	   it could be the return value of a function, or a parameter
	   to a function which is laid out on the stack, or a temporary
	   variable which must be passed by reference.

	   We guarantee that the expression will either be constructed
	   or copied into our original target.  */

	tree slot = TREE_OPERAND (exp, 0);
	tree cleanups = NULL_TREE;
	tree exp1;

	if (TREE_CODE (slot) != VAR_DECL)
	  abort ();

	if (! ignore)
	  target = original_target;

	/* Set this here so that if we get a target that refers to a
	   register variable that's already been used, put_reg_into_stack
	   knows that it should fix up those uses.  */
	TREE_USED (slot) = 1;

	if (target == 0)
	  {
	    if (DECL_RTL_SET_P (slot))
	      {
		target = DECL_RTL (slot);
		/* If we have already expanded the slot, don't do
		   anything else.  */
		if (TREE_OPERAND (exp, 1) == NULL_TREE)
		  return target;
	      }
	    else
	      {
		target = assign_temp (type, 2, 0, 1);
		/* All temp slots at this level must not conflict.  */
		preserve_temp_slots (target);
		SET_DECL_RTL (slot, target);
		if (TREE_ADDRESSABLE (slot))
		  put_var_into_stack (slot, /*rescan=*/false);

		/* Since SLOT is not known to the called function
		   to belong to its stack frame, we must build an explicit
		   cleanup.  This case occurs when we must build up a reference
		   to pass the reference as an argument.  In this case,
		   it is very likely that such a reference need not be
		   built here.  */

		if (TREE_OPERAND (exp, 2) == 0)
		  TREE_OPERAND (exp, 2)
		    = (*lang_hooks.maybe_build_cleanup) (slot);
		cleanups = TREE_OPERAND (exp, 2);
	      }
	  }
	else
	  {
	    /* This case does occur, when expanding a parameter which
	       needs to be constructed on the stack.  The target
	       is the actual stack address that we want to initialize.
	       The function we call will perform the cleanup in this case.  */

	    /* If we have already assigned it space, use that space,
	       not the target that we were passed in, as our target
	       parameter is only a hint.  */
	    if (DECL_RTL_SET_P (slot))
	      {
		target = DECL_RTL (slot);
		/* If we have already expanded the slot, don't do
		   anything else.  */
		if (TREE_OPERAND (exp, 1) == NULL_TREE)
		  return target;
	      }
	    else
	      {
		SET_DECL_RTL (slot, target);
		/* If we must have an addressable slot, then make sure that
		   the RTL that we just stored in slot is OK.  */
		if (TREE_ADDRESSABLE (slot))
		  put_var_into_stack (slot, /*rescan=*/true);
	      }
	  }

	exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
	/* Mark it as expanded.  */
	TREE_OPERAND (exp, 1) = NULL_TREE;

	store_expr (exp1, target, modifier == EXPAND_STACK_PARM ? 2 : 0);

	expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));

	return target;
      }

    case INIT_EXPR:
      {
	tree lhs = TREE_OPERAND (exp, 0);
	tree rhs = TREE_OPERAND (exp, 1);

	temp = expand_assignment (lhs, rhs, ! ignore);
	return temp;
      }

    case MODIFY_EXPR:
      {
	/* If lhs is complex, expand calls in rhs before computing it.
	   That's so we don't compute a pointer and save it over a
	   call.  If lhs is simple, compute it first so we can give it
	   as a target if the rhs is just a call.  This avoids an
	   extra temp and copy and that prevents a partial-subsumption
	   which makes bad code.  Actually we could treat
	   component_ref's of vars like vars.  */

	tree lhs = TREE_OPERAND (exp, 0);
	tree rhs = TREE_OPERAND (exp, 1);

	temp = 0;

	/* Check for |= or &= of a bitfield of size one into another bitfield
	   of size 1.  In this case, (unless we need the result of the
	   assignment) we can do this more efficiently with a
	   test followed by an assignment, if necessary.

	   ??? At this point, we can't get a BIT_FIELD_REF here.  But if
	   things change so we do, this code should be enhanced to
	   support it.  */
	if (ignore
	    && TREE_CODE (lhs) == COMPONENT_REF
	    && (TREE_CODE (rhs) == BIT_IOR_EXPR
		|| TREE_CODE (rhs) == BIT_AND_EXPR)
	    && TREE_OPERAND (rhs, 0) == lhs
	    && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
	    && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
	    && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
	  {
	    rtx label = gen_label_rtx ();

	    do_jump (TREE_OPERAND (rhs, 1),
		     TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
		     TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
	    expand_assignment (lhs, convert (TREE_TYPE (rhs),
					     (TREE_CODE (rhs) == BIT_IOR_EXPR
					      ? integer_one_node
					      : integer_zero_node)),
			       0);
	    do_pending_stack_adjust ();
	    emit_label (label);
	    return const0_rtx;
	  }

	temp = expand_assignment (lhs, rhs, ! ignore);
	return temp;
      }

    case RETURN_EXPR:
      if (!TREE_OPERAND (exp, 0))
	expand_null_return ();
      else
	expand_return (TREE_OPERAND (exp, 0));
      return const0_rtx;
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
      return expand_increment (exp, 0, ignore);

    case POSTINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
      /* Faster to treat as pre-increment if result is not used.  */
      return expand_increment (exp, ! ignore, ignore);

    case ADDR_EXPR:
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      /* Are we taking the address of a nested function?  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
	  && decl_function_context (TREE_OPERAND (exp, 0)) != 0
	  && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
	  && ! TREE_STATIC (exp))
	{
	  op0 = trampoline_address (TREE_OPERAND (exp, 0));
	  op0 = force_operand (op0, target);
	}
      /* If we are taking the address of something erroneous, just
	 use zero.  */
      else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
	return const0_rtx;
      /* If we are taking the address of a constant and are at the
	 top level, we have to use output_constant_def since we can't
	 call force_const_mem at top level.  */
      else if (cfun == 0
	       && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
		   || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
		       == 'c')))
	op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
      else
	{
	  /* We make sure to pass const0_rtx down if we came in with
	     ignore set, to avoid doing the cleanups twice for something.  */
	  op0 = expand_expr (TREE_OPERAND (exp, 0),
			     ignore ? const0_rtx : NULL_RTX, VOIDmode,
			     (modifier == EXPAND_INITIALIZER
			      ? modifier : EXPAND_CONST_ADDRESS));

	  /* If we are going to ignore the result, OP0 will have been set
	     to const0_rtx, so just return it.  Don't get confused and
	     think we are taking the address of the constant.  */
	  if (ignore)
	    return op0;

	  /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
	     clever and returns a REG when given a MEM.  */
	  op0 = protect_from_queue (op0, 1);

	  /* We would like the object in memory.  If it is a constant, we can
	     have it be statically allocated into memory.  For a non-constant,
	     we need to allocate some memory and store the value into it.  */

	  if (CONSTANT_P (op0))
	    op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
				   op0);
	  else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
		   || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
		   || GET_CODE (op0) == PARALLEL || GET_CODE (op0) == LO_SUM)
	    {
	      /* If the operand is a SAVE_EXPR, we can deal with this by
		 forcing the SAVE_EXPR into memory.  */
	      if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
		{
		  put_var_into_stack (TREE_OPERAND (exp, 0),
				      /*rescan=*/true);
		  op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
		}
	      else
		{
		  /* If this object is in a register, it can't be BLKmode.  */
		  tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
		  rtx memloc = assign_temp (inner_type, 1, 1, 1);

		  if (GET_CODE (op0) == PARALLEL)
		    /* Handle calls that pass values in multiple
		       non-contiguous locations.  The Irix 6 ABI has examples
		       of this.  */
		    emit_group_store (memloc, op0, inner_type,
				      int_size_in_bytes (inner_type));
		  else
		    emit_move_insn (memloc, op0);

		  op0 = memloc;
		}
	    }

	  if (GET_CODE (op0) != MEM)
	    abort ();

	  mark_temp_addr_taken (op0);
	  if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	    {
	      op0 = XEXP (op0, 0);
#ifdef POINTERS_EXTEND_UNSIGNED
	      if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
		  && mode == ptr_mode)
		op0 = convert_memory_address (ptr_mode, op0);
#endif
	      return op0;
	    }

	  /* If OP0 is not aligned at least as much as the type requires, we
	     need to make a temporary, copy OP0 to it, and take the address of
	     the temporary.  We want to use the alignment of the type, not of
	     the operand.  Note that this is incorrect for FUNCTION_TYPE, but
	     the test for BLKmode means that can't happen.  The test for
	     BLKmode is because we never make mis-aligned MEMs with
	     non-BLKmode.

	     We don't need to do this at all if the machine doesn't have
	     strict alignment.  */
	  if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
	      && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
		  > MEM_ALIGN (op0))
	      && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
	    {
	      tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
	      rtx new;

	      if (TYPE_ALIGN_OK (inner_type))
		abort ();

	      if (TREE_ADDRESSABLE (inner_type))
		{
		  /* We can't make a bitwise copy of this object, so fail.  */
		  error ("cannot take the address of an unaligned member");
		  return const0_rtx;
		}

	      new = assign_stack_temp_for_type
		(TYPE_MODE (inner_type),
		 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
		 : int_size_in_bytes (inner_type),
		 1, build_qualified_type (inner_type,
					  (TYPE_QUALS (inner_type)
					   | TYPE_QUAL_CONST)));

	      emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
			       (modifier == EXPAND_STACK_PARM
				? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));

	      op0 = new;
	    }

	  op0 = force_operand (XEXP (op0, 0), target);
	}

      if (flag_force_addr
	  && GET_CODE (op0) != REG
	  && modifier != EXPAND_CONST_ADDRESS
	  && modifier != EXPAND_INITIALIZER
	  && modifier != EXPAND_SUM)
	op0 = force_reg (Pmode, op0);

      if (GET_CODE (op0) == REG
	  && ! REG_USERVAR_P (op0))
	mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));

#ifdef POINTERS_EXTEND_UNSIGNED
      if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
	  && mode == ptr_mode)
	op0 = convert_memory_address (ptr_mode, op0);
#endif

      return op0;

    case ENTRY_VALUE_EXPR:
      abort ();
    /* COMPLEX type for Extended Pascal & Fortran.  */
    case COMPLEX_EXPR:
      {
	enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
	rtx insns;

	/* Get the rtx code of the operands.  */
	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
	op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);

	if (! target)
	  target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

	start_sequence ();

	/* Move the real (op0) and imaginary (op1) parts to their location.  */
	emit_move_insn (gen_realpart (mode, target), op0);
	emit_move_insn (gen_imagpart (mode, target), op1);

	insns = get_insns ();
	end_sequence ();

	/* Complex construction should appear as a single unit.  */
	/* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
	   each with a separate pseudo as destination.
	   It's not correct for flow to treat them as a unit.  */
	if (GET_CODE (target) != CONCAT)
	  emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
	else
	  emit_insn (insns);

	return target;
      }

    case REALPART_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
      return gen_realpart (mode, op0);

    case IMAGPART_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
      return gen_imagpart (mode, op0);

    case CONJ_EXPR:
      {
	enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
	rtx imag_t;
	rtx insns;

	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);

	if (! target)
	  target = gen_reg_rtx (mode);

	start_sequence ();

	/* Store the realpart and the negated imagpart to target.  */
	emit_move_insn (gen_realpart (partmode, target),
			gen_realpart (partmode, op0));

	imag_t = gen_imagpart (partmode, target);
	temp = expand_unop (partmode,
			    ! unsignedp && flag_trapv
			    && (GET_MODE_CLASS (partmode) == MODE_INT)
			    ? negv_optab : neg_optab,
			    gen_imagpart (partmode, op0), imag_t, 0);
	if (temp != imag_t)
	  emit_move_insn (imag_t, temp);

	insns = get_insns ();
	end_sequence ();

	/* Conjugate should appear as a single unit.
	   If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
	   each with a separate pseudo as destination.
	   It's not correct for flow to treat them as a unit.  */
	if (GET_CODE (target) != CONCAT)
	  emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
	else
	  emit_insn (insns);

	return target;
      }
    case TRY_CATCH_EXPR:
      {
	tree handler = TREE_OPERAND (exp, 1);

	expand_eh_region_start ();

	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);

	expand_eh_region_end_cleanup (handler);

	return op0;
      }

    case TRY_FINALLY_EXPR:
      {
	tree try_block = TREE_OPERAND (exp, 0);
	tree finally_block = TREE_OPERAND (exp, 1);

	if (!optimize || unsafe_for_reeval (finally_block) > 1)
	  {
	    /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
	       is not sufficient, so we cannot expand the block twice.
	       So we play games with GOTO_SUBROUTINE_EXPR to let us
	       expand the thing only once.  */
	    /* When not optimizing, we go ahead with this form since
	       (1) user breakpoints operate more predictably without
	       code duplication, and
	       (2) we're not running any of the global optimizers
	       that would explode in time/space with the highly
	       connected CFG created by the indirect branching.  */

	    rtx finally_label = gen_label_rtx ();
	    rtx done_label = gen_label_rtx ();
	    rtx return_link = gen_reg_rtx (Pmode);
	    tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
				  (tree) finally_label, (tree) return_link);
	    TREE_SIDE_EFFECTS (cleanup) = 1;

	    /* Start a new binding layer that will keep track of all cleanup
	       actions to be performed.  */
	    expand_start_bindings (2);
	    target_temp_slot_level = temp_slot_level;

	    expand_decl_cleanup (NULL_TREE, cleanup);
	    op0 = expand_expr (try_block, target, tmode, modifier);

	    preserve_temp_slots (op0);
	    expand_end_bindings (NULL_TREE, 0, 0);
	    emit_jump (done_label);
	    emit_label (finally_label);
	    expand_expr (finally_block, const0_rtx, VOIDmode, 0);
	    emit_indirect_jump (return_link);
	    emit_label (done_label);
	  }
	else
	  {
	    expand_start_bindings (2);
	    target_temp_slot_level = temp_slot_level;

	    expand_decl_cleanup (NULL_TREE, finally_block);
	    op0 = expand_expr (try_block, target, tmode, modifier);

	    preserve_temp_slots (op0);
	    expand_end_bindings (NULL_TREE, 0, 0);
	  }

	return op0;
      }

    case GOTO_SUBROUTINE_EXPR:
      {
	rtx subr = (rtx) TREE_OPERAND (exp, 0);
	rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
	rtx return_address = gen_label_rtx ();

	emit_move_insn (return_link,
			gen_rtx_LABEL_REF (Pmode, return_address));
	emit_jump (subr);
	emit_label (return_address);
	return const0_rtx;
      }
    case VA_ARG_EXPR:
      return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);

    case EXC_PTR_EXPR:
      return get_exception_pointer (cfun);

    case FDESC_EXPR:
      /* Function descriptors are not valid except as
	 initialization constants, and should not be expanded.  */
      abort ();

    default:
      return (*lang_hooks.expand_expr) (exp, original_target, tmode, modifier);
    }

  /* Here to do an ordinary binary operator, generating an instruction
     from the optab already placed in `this_optab'.  */
 binop:
  if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
    subtarget = 0;
  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
 binop2:
  if (modifier == EXPAND_STACK_PARM)
    target = 0;
  temp = expand_binop (mode, this_optab, op0, op1, target,
		       unsignedp, OPTAB_LIB_WIDEN);
  if (temp == 0)
    abort ();
  return temp;
}
/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
   when applied to the address of EXP produces an address known to be
   aligned more than BIGGEST_ALIGNMENT.  */

static int
is_aligning_offset (tree offset, tree exp)
{
  /* Strip off any conversions and WITH_RECORD_EXPR nodes.  */
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
	 || TREE_CODE (offset) == NOP_EXPR
	 || TREE_CODE (offset) == CONVERT_EXPR
	 || TREE_CODE (offset) == WITH_RECORD_EXPR)
    offset = TREE_OPERAND (offset, 0);

  /* We must now have a BIT_AND_EXPR with a constant that is one less than
     a power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
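  /* Shape of the offset being recognized (illustrative only): an
     expression like

	 -ADDR & (ALIGN - 1)

     i.e. a BIT_AND_EXPR of a NEGATE_EXPR of the address with a mask one
     less than a power of two, which yields the adjustment that rounds
     ADDR up to a multiple of ALIGN.  */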
  if (TREE_CODE (offset) != BIT_AND_EXPR
      || !host_integerp (TREE_OPERAND (offset, 1), 1)
      || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT) <= 0
      || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
    return 0;

  /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
     It must be NEGATE_EXPR.  Then strip any more conversions.  */
  offset = TREE_OPERAND (offset, 0);
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
	 || TREE_CODE (offset) == NOP_EXPR
	 || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  if (TREE_CODE (offset) != NEGATE_EXPR)
    return 0;

  offset = TREE_OPERAND (offset, 0);
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
	 || TREE_CODE (offset) == NOP_EXPR
	 || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
     whose type is the same as EXP.  */
  return (TREE_CODE (offset) == ADDR_EXPR
	  && (TREE_OPERAND (offset, 0) == exp
	      || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
		  && (TREE_TYPE (TREE_OPERAND (offset, 0))
		      == TREE_TYPE (exp)))));
}

/* Return the tree node if an ARG corresponds to a string constant or zero
   if it doesn't.  If we return nonzero, set *PTR_OFFSET to the offset
   in bytes within the string that ARG is accessing.  The type of the
   offset will be `sizetype'.  */
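/* Example (illustrative only): for an ARG representing "hello" + 2,
   the STRING_CST for "hello" is returned and *PTR_OFFSET is set to a
   sizetype constant 2, letting callers such as the strlen expander
   fold the access.  */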
tree
string_constant (tree arg, tree *ptr_offset)
{
  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
    {
      *ptr_offset = size_zero_node;
      return TREE_OPERAND (arg, 0);
    }
  else if (TREE_CODE (arg) == PLUS_EXPR)
    {
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
	{
	  *ptr_offset = convert (sizetype, arg1);
	  return TREE_OPERAND (arg0, 0);
	}
      else if (TREE_CODE (arg1) == ADDR_EXPR
	       && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
	{
	  *ptr_offset = convert (sizetype, arg0);
	  return TREE_OPERAND (arg1, 0);
	}
    }

  return 0;
}

/* Expand code for a post- or pre- increment or decrement
   and return the RTX for the result.
   POST is 1 for postinc/decrements and 0 for preinc/decrements.  */
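/* Usage sketch (not part of the original source): for "y = x++" the
   caller passes POST == 1 and gets the pre-increment value back, while
   for "y = ++x" POST == 0 yields the updated value; when IGNORE is
   nonzero the result is unused, so a post-increment can be handled as
   the cheaper pre-increment.  */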
static rtx
expand_increment (tree exp, int post, int ignore)
{
  rtx op0, op1;
  rtx temp, value;
  tree incremented = TREE_OPERAND (exp, 0);
  optab this_optab = add_optab;
  int icode;
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
  int op0_is_copy = 0;
  int single_insn = 0;
  /* 1 means we can't store into OP0 directly,
     because it is a subreg narrower than a word,
     and we don't dare clobber the rest of the word.  */
  int bad_subreg = 0;

  /* Stabilize any component ref that might need to be
     evaluated more than once below.  */
  if (!post
      || TREE_CODE (incremented) == BIT_FIELD_REF
      || (TREE_CODE (incremented) == COMPONENT_REF
	  && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
	      || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
    incremented = stabilize_reference (incremented);
  /* Nested *INCREMENT_EXPRs can happen in C++.  We must force innermost
     ones into save exprs so that they don't accidentally get evaluated
     more than once by the code below.  */
  if (TREE_CODE (incremented) == PREINCREMENT_EXPR
      || TREE_CODE (incremented) == PREDECREMENT_EXPR)
    incremented = save_expr (incremented);

  /* Compute the operands as RTX.
     Note whether OP0 is the actual lvalue or a copy of it:
     I believe it is a copy iff it is a register or subreg
     and insns were generated in computing it.  */
  temp = get_last_insn ();
  op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);

  /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
     in place but instead must do sign- or zero-extension during assignment,
     so we copy it into a new register and let the code below use it as
     a copy.

     Note that we can safely modify this SUBREG since it is known not to be
     shared (it was made by the expand_expr call above).  */

  if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
    {
      if (post)
	SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
      else
	bad_subreg = 1;
    }
  else if (GET_CODE (op0) == SUBREG
	   && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
    {
      /* We cannot increment this SUBREG in place.  If we are
	 post-incrementing, get a copy of the old value.  Otherwise,
	 just mark that we cannot increment in place.  */
      if (post)
	op0 = copy_to_reg (op0);
      else
	bad_subreg = 1;
    }

  op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
		 && temp != get_last_insn ());
  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);

  /* Decide whether incrementing or decrementing.  */
  if (TREE_CODE (exp) == POSTDECREMENT_EXPR
      || TREE_CODE (exp) == PREDECREMENT_EXPR)
    this_optab = sub_optab;

  /* Convert decrement by a constant into a negative increment.  */
  if (this_optab == sub_optab
      && GET_CODE (op1) == CONST_INT)
    {
      op1 = GEN_INT (-INTVAL (op1));
      this_optab = add_optab;
    }

  if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
    this_optab = this_optab == add_optab ? addv_optab : subv_optab;

  /* For a preincrement, see if we can do this with a single instruction.  */
  if (!post)
    {
      icode = (int) this_optab->handlers[(int) mode].insn_code;
      if (icode != (int) CODE_FOR_nothing
	  /* Make sure that OP0 is valid for operands 0 and 1
	     of the insn we want to queue.  */
	  && (*insn_data[icode].operand[0].predicate) (op0, mode)
	  && (*insn_data[icode].operand[1].predicate) (op0, mode)
	  && (*insn_data[icode].operand[2].predicate) (op1, mode))
	single_insn = 1;
    }

  /* If OP0 is not the actual lvalue, but rather a copy in a register,
     then we cannot just increment OP0.  We must therefore contrive to
     increment the original value.  Then, for postincrement, we can return
     OP0 since it is a copy of the old value.  For preincrement, expand here
     unless we can do it with a single insn.

     Likewise if storing directly into OP0 would clobber high bits
     we need to preserve (bad_subreg).  */
  if (op0_is_copy || (!post && !single_insn) || bad_subreg)
    {
      /* This is the easiest way to increment the value wherever it is.
	 Problems with multiple evaluation of INCREMENTED are prevented
	 because either (1) it is a component_ref or preincrement,
	 in which case it was stabilized above, or (2) it is an array_ref
	 with constant index in an array in a register, which is
	 safe to reevaluate.  */
      tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
			     || TREE_CODE (exp) == PREDECREMENT_EXPR)
			    ? MINUS_EXPR : PLUS_EXPR),
			   TREE_TYPE (exp),
			   incremented,
			   TREE_OPERAND (exp, 1));

      while (TREE_CODE (incremented) == NOP_EXPR
	     || TREE_CODE (incremented) == CONVERT_EXPR)
	{
	  newexp = convert (TREE_TYPE (incremented), newexp);
	  incremented = TREE_OPERAND (incremented, 0);
	}

      temp = expand_assignment (incremented, newexp, ! post && ! ignore);
      return post ? op0 : temp;
    }
  /* We have a true reference to the value in OP0.
     If there is an insn to add or subtract in this mode, queue it.
     Queueing the increment insn avoids the register shuffling
     that often results if we must increment now and first save
     the old value for subsequent use.  */

#if 0				/* Turned off to avoid making extra insn for indexed memref.  */
  op0 = stabilize (op0);
#endif

  icode = (int) this_optab->handlers[(int) mode].insn_code;
  if (icode != (int) CODE_FOR_nothing
      /* Make sure that OP0 is valid for operands 0 and 1
	 of the insn we want to queue.  */
      && (*insn_data[icode].operand[0].predicate) (op0, mode)
      && (*insn_data[icode].operand[1].predicate) (op0, mode))
    {
      if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
	op1 = force_reg (mode, op1);

      return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
    }
  if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
    {
      rtx addr = (general_operand (XEXP (op0, 0), mode)
		  ? force_reg (Pmode, XEXP (op0, 0))
		  : copy_to_reg (XEXP (op0, 0)));
      rtx temp, result;

      op0 = replace_equiv_address (op0, addr);
      temp = force_reg (GET_MODE (op0), op0);
      if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
	op1 = force_reg (mode, op1);

      /* The increment queue is LIFO, thus we have to `queue'
	 the instructions in reverse order.  */
      enqueue_insn (op0, gen_move_insn (op0, temp));
      result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
      return result;
    }

  /* Preincrement, or we can't increment with one simple insn.  */
  if (post)
    /* Save a copy of the value before inc or dec, to return it later.  */
    temp = value = copy_to_reg (op0);
  else
    /* Arrange to return the incremented value.  */
    /* Copy the rtx because expand_binop will protect from the queue,
       and the results of that would be invalid for us to return
       if our caller does emit_queue before using our result.  */
    temp = copy_rtx (value = op0);

  /* Increment however we can.  */
  op1 = expand_binop (mode, this_optab, value, op1, op0,
		      TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);

  /* Make sure the value is stored into OP0.  */
  if (op1 != op0)
    emit_move_insn (op0, op1);

  return temp;
}
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is nonzero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */
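/* Hypothetical use (not in the original): for "flag = (a > b)" on a
   machine with a store-condition-code pattern, a single scc insn sets
   FLAG; when no such insn works, the fallback below materializes 1,
   branches past the clearing insn when the comparison holds, and
   otherwise overwrites the result with 0.  */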
static rtx
do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	       == FUNCTION_TYPE))
	  || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
		  == FUNCTION_TYPE))))
    return 0;
#endif

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
	code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = LT;
      else
	code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = GE;
      else
	code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
	code = unsignedp ? GEU : GE;
      break;
    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;
    default:
      abort ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.

     The code to make this transformation was moved into fold_single_bit_test,
     so we just call into the folder and expand its result.  */
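  /* Worked example (illustrative): "(x & 8) != 0" is folded to
     "(x >> 3) & 1"; for the EQ sense the result is additionally XORed
     with 1.  No scc instruction is required.  */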
10024 if ((code == NE || code == EQ)
10025 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10026 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10028 tree type = (*lang_hooks.types.type_for_mode) (mode, unsignedp);
10029 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
10031 target, VOIDmode, EXPAND_NORMAL);
10034 /* Now see if we are likely to be able to do this. Return if not. */
10035 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10038 icode = setcc_gen_code[(int) code];
10039 if (icode == CODE_FOR_nothing
10040 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10042 /* We can only do this if it is one of the special cases that
10043 can be handled without an scc insn. */
10044 if ((code == LT && integer_zerop (arg1))
10045 || (! only_cheap && code == GE && integer_zerop (arg1)))
10047 else if (BRANCH_COST >= 0
10048 && ! only_cheap && (code == NE || code == EQ)
10049 && TREE_CODE (type) != REAL_TYPE
10050 && ((abs_optab->handlers[(int) operand_mode].insn_code
10051 != CODE_FOR_nothing)
10052 || (ffs_optab->handlers[(int) operand_mode].insn_code
10053 != CODE_FOR_nothing)))
  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1, 1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);
  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if the emit_store_flag does anything it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
                            queued_subexp_p (op0) ? copy_rtx (op0) : op0,
                            queued_subexp_p (op1) ? copy_rtx (op1) : op1,
                            operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
        result = expand_binop (mode, xor_optab, result, const1_rtx,
                               result, 0, OPTAB_LIB_WIDEN);
      return result;
    }
  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
                             operand_mode, NULL_RTX);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
             || (result != const0_rtx && invert))
            ? const0_rtx : const1_rtx);
  /* The code of RESULT may not match CODE if compare_from_rtx
     decided to swap its operands and reverse the original code.

     We know that compare_from_rtx returns either a CONST_INT or
     a new comparison code, so it is safe to just extract the
     code from RESULT.  */
  code = GET_CODE (result);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif
/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */
#ifndef CASE_VALUES_THRESHOLD
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#endif /* CASE_VALUES_THRESHOLD */
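/* (So, by default, a switch needs at least 4 case labels when a casesi
   pattern exists, and 5 otherwise, before a dispatch table is preferred
   over a chain of compare-and-branch insns.)  */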
unsigned int
case_values_threshold (void)
{
  return CASE_VALUES_THRESHOLD;
}
/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */
int
try_casesi (tree index_type, tree index_expr, tree minval, tree range,
            rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;
  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build (MINUS_EXPR, index_type,
                          index_expr, minval);
      minval = integer_zero_node;
      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
                               omode, 1, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
        {
          index_expr = convert ((*lang_hooks.types.type_for_size)
                                (index_bits, 0), index_expr);
          index_type = TREE_TYPE (index_expr);
        }

      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
    }
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();
  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);

  op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
                       op1, TREE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
                       op2, TREE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
                              table_label, default_label));
  return 1;
}
/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif
/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */
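/* (In outline, the code emitted below behaves like

     if ((unsigned) index > range)
       goto default_label;
     goto *table_label[index];

   with the load from the dispatch table done in CASE_VECTOR_MODE and
   widened as needed.)  */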
static void
do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
              rtx default_label)
{
  rtx temp, vector;

  if (INTVAL (range) > cfun->max_jumptable_ents)
    cfun->max_jumptable_ents = INTVAL (range);
  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */
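  /* (E.g. for a switch covering [5, 20], INDEX holds the original value
     minus 5 and RANGE is 15; an original value below 5 wraps around to
     a very large unsigned number, so the single GTU test rejects both
     out-of-range directions at once.)  */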
  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
                           default_label);
  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip thru, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif
  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
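  /* (The address built below has the shape

       (plus:P (mult:P index (const_int entry_size))
               (label_ref:P table_label))

     where entry_size is GET_MODE_SIZE (CASE_VECTOR_MODE).)  */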
  index = gen_rtx_PLUS (Pmode,
                        gen_rtx_MULT (Pmode, index,
                                      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
                        gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  MEM_NOTRAP_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
int
try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
               rtx table_label, rtx default_label)
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold (build (MINUS_EXPR, index_type,
                            convert (index_type, index_expr),
                            convert (index_type, minval)));
  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
                convert_modes (TYPE_MODE (index_type),
                               TYPE_MODE (TREE_TYPE (range)),
                               expand_expr (range, NULL_RTX,
                                            VOIDmode, 0),
                               TREE_UNSIGNED (TREE_TYPE (range))),
                table_label, default_label);
  return 1;
}
/* Nonzero if the mode is a valid vector mode for this architecture.
   This returns nonzero even if there is no hardware support for the
   vector mode, but we can emulate with narrower modes.  */
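/* (For example, on a target without V2DI support, V2DI is still
   reported valid as long as DImode moves exist, since a V2DI move can
   be open-coded as two DImode moves.)  */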
int
vector_mode_valid_p (enum machine_mode mode)
{
  enum mode_class class = GET_MODE_CLASS (mode);
  enum machine_mode innermode;

  /* Doh!  What's going on?  */
  if (class != MODE_VECTOR_INT
      && class != MODE_VECTOR_FLOAT)
    return 0;

  /* Hardware support.  Woo hoo!  */
  if (VECTOR_MODE_SUPPORTED_P (mode))
    return 1;

  innermode = GET_MODE_INNER (mode);

  /* We should probably return 1 if requesting V4DI and we have no DI,
     but we have V2DI, but this is probably very unlikely.  */

  /* If we have support for the inner mode, we can safely emulate it.
     We may not have V2DI, but we can emulate with a pair of DIs.  */
  return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
}
/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */
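/* (E.g. the V4SI constant {1, 2, 3, 4} becomes
   (const_vector:V4SI [(const_int 1) (const_int 2)
                       (const_int 3) (const_int 4)]).)  */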
static rtx
const_vector_from_tree (tree exp)
{
  rtvec v;
  int units, i;
  tree link, elt;
  enum machine_mode inner, mode;

  mode = TYPE_MODE (TREE_TYPE (exp));

  if (is_zeros_p (exp))
    return CONST0_RTX (mode);

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  v = rtvec_alloc (units);

  link = TREE_VECTOR_CST_ELTS (exp);
  for (i = 0; link; link = TREE_CHAIN (link), ++i)
    {
      elt = TREE_VALUE (link);

      if (TREE_CODE (elt) == REAL_CST)
        RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
                                                         inner);
      else
        RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
                                               TREE_INT_CST_HIGH (elt),
                                               inner);
    }

  /* Initialize remaining elements to 0.  */
  for (; i < units; ++i)
    RTVEC_ELT (v, i) = CONST0_RTX (inner);

  return gen_rtx_raw_CONST_VECTOR (mode, v);
}

#include "gt-expr.h"