/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "machmode.h"
#include "real.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "typeclass.h"
#include "toplev.h"
#include "ggc.h"
#include "langhooks.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif

#endif
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif
80 /* Convert defined/undefined to boolean. */
81 #ifdef TARGET_MEM_FUNCTIONS
82 #undef TARGET_MEM_FUNCTIONS
83 #define TARGET_MEM_FUNCTIONS 1
85 #define TARGET_MEM_FUNCTIONS 0
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Chain of pending expressions for PLACEHOLDER_EXPR to replace.  */
tree placeholder_list = 0;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
  void *constfundata;
  int reverse;
};
static rtx enqueue_insn (rtx, rtx);
static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
						     unsigned int);
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
			      struct move_by_pieces *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movstr (rtx, rtx, rtx, unsigned);
static rtx emit_block_move_via_libcall (rtx, rtx, rtx);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
			       struct store_by_pieces *);
static bool clear_storage_via_clrstr (rtx, rtx, unsigned);
static rtx clear_storage_via_libcall (rtx, rtx);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static int is_zeros_p (tree);
static int mostly_zeros_p (tree);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
				     HOST_WIDE_INT, enum machine_mode,
				     tree, tree, int, int);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
			tree, enum machine_mode, int, tree, int);
static rtx var_rtx (tree);
static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
static unsigned HOST_WIDE_INT highest_pow2_factor_for_type (tree, tree);
static int is_aligning_offset (tree, tree);
static rtx expand_increment (tree, int, int);
static rtx do_store_flag (tree, rtx, enum machine_mode, int);
#ifdef PUSH_ROUNDING
static void emit_single_push_insn (enum machine_mode, rtx, tree);
#endif
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
static rtx const_vector_from_tree (tree);
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
/* If a memory-to-memory move would take MOVE_RATIO or more simple
   move-instruction sequences, we will do a movstr or libcall instead.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)
#endif
#endif

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
#endif

/* If a clear memory operation would take CLEAR_RATIO or more simple
   move-instruction sequences, we will do a clrstr or libcall instead.  */

#ifndef CLEAR_RATIO
#if defined (HAVE_clrstrqi) || defined (HAVE_clrstrhi) || defined (HAVE_clrstrsi) || defined (HAVE_clrstrdi) || defined (HAVE_clrstrti)
#define CLEAR_RATIO 2
#else
/* If we are optimizing for space, cut down the default clear ratio.  */
#define CLEAR_RATIO (optimize_size ? 3 : 15)
#endif
#endif

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero, or
   to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
#endif
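
/* Worked example of the ratio heuristic above (illustrative, not part of
   the original sources): with MOVE_RATIO at its default of 15 and word
   moves available at sufficient alignment, a 16-byte copy on a 32-bit
   target costs four SImode moves, so MOVE_BY_PIECES_P (16, 32) is true
   and the copy is expanded inline.  Under -Os the ratio drops to 3, the
   same four moves no longer qualify, and the copy falls back to a movstr
   pattern or a memcpy libcall.  */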
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once (void)
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    REGNO (reg) = regno;

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
	   srcmode = GET_MODE_WIDER_MODE (srcmode))
	{
	  enum insn_code ic;

	  ic = can_extend_p (mode, srcmode, 0);
	  if (ic == CODE_FOR_nothing)
	    continue;

	  PUT_MODE (mem, srcmode);

	  if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
	    float_extend_from_mem[mode][srcmode] = true;
	}
    }
}
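
/* Illustrative consequence of the probing above (not a statement about any
   particular target): when a target's move patterns accept
   (set (reg:SI ...) (mem:SI ...)), recog succeeds and
   direct_load[(int) SImode] becomes 1, so later expansion is free to load
   SImode fields from memory directly rather than through a wider mode.  */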
/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  cfun->expr = (struct expr_status *) ggc_alloc (sizeof (struct expr_status));

  pending_chain = 0;
  pending_stack_adjust = 0;
  stack_pointer_delta = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Small sanity check that the queue is empty at the end of a function.  */

void
finish_expr_for_function (void)
{
  if (pending_chain)
    abort ();
}
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (rtx var, rtx body)
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
				  body, pending_chain);
  return pending_chain;
}
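
/* Illustrative use of the queue (a hypothetical sketch, not code from the
   original sources): to expand a post-increment "i++", a caller can do

     rtx q = enqueue_insn (i_reg, gen_add2_insn (i_reg, const1_rtx));

   and then hand Q to protect_from_queue wherever the pre-increment value
   of I is needed; the queued add itself is emitted later by emit_queue.  */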
/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (rtx x, int modify)
{
  RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  rtx y = XEXP (x, 0);
	  rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));

	  if (QUEUED_INSN (y))
	    {
	      rtx temp = gen_reg_rtx (GET_MODE (x));

	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }

	  /* Copy the address into a pseudo, so that the returned value
	     remains correct across calls to emit_queue.  */
	  return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
	}

      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }
  /* If the increment has not happened, use the variable itself.  Copy it
     into a new pseudo so that the value remains correct across calls to
     emit_queue.  */
  if (QUEUED_INSN (x) == 0)
    return copy_to_reg (QUEUED_VAR (x));
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

int
queued_subexp_p (rtx x)
{
  enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
	      || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}
/* Perform all the pending incrementations.  */

void
emit_queue (void)
{
  rtx p;
  while ((p = pending_chain))
    {
      rtx body = QUEUED_BODY (p);

      switch (GET_CODE (body))
	{
	case INSN:
	case JUMP_INSN:
	case CALL_INSN:
	case CODE_LABEL:
	case BARRIER:
	case NOTE:
	  QUEUED_INSN (p) = body;
	  emit_insn (body);
	  break;

#ifdef ENABLE_CHECKING
	case SEQUENCE:
	  abort ();
	  break;
#endif

	default:
	  QUEUED_INSN (p) = emit_insn (body);
	  break;
	}

      pending_chain = QUEUED_NEXT (p);
    }
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (rtx to, rtx from, int unsignedp)
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
			      : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
	abort ();

      if (VECTOR_MODE_P (to_mode))
	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }

  if (to_real)
    {
      rtx value, insns;

      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
	{
	  /* Try converting directly if the insn is supported.  */
	  if ((code = can_extend_p (to_mode, from_mode, 0))
	      != CODE_FOR_nothing)
	    {
	      emit_unop_insn (code, to, from, UNKNOWN);
	      return;
	    }
	}
#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctqfqf2
      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
      libcall = (rtx) 0;
      switch (from_mode)
	{
	case SFmode:
	  switch (to_mode)
	    {
	    case DFmode:
	      libcall = extendsfdf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extendsfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extendsftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case DFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncdfsf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extenddfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extenddftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case XFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncxfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = truncxfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case TFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = trunctfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = trunctfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	default:
	  break;
	}

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
				       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
								    from));
      return;
    }
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }
  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  if (to_mode == PQImode)
    {
      if (from_mode != QImode)
	from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_truncqipqi2
      if (HAVE_truncqipqi2)
	{
	  emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncqipqi2 */
      abort ();
    }

  if (from_mode == PQImode)
    {
      if (to_mode != QImode)
	{
	  from = convert_to_mode (QImode, from, unsignedp);
	  from_mode = QImode;
	}
      else
	{
#ifdef HAVE_extendpqiqi2
	  if (HAVE_extendpqiqi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpqiqi2 */
	  abort ();
	}
    }

  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi2
	  if (! unsignedp && HAVE_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi2 */
#ifdef HAVE_zero_extendpsisi2
	  if (unsignedp && HAVE_zero_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_zero_extendpsisi2 */
	  abort ();
	}
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
	from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
	{
	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
	{
	  from = convert_to_mode (DImode, from, unsignedp);
	  from_mode = DImode;
	}
      else
	{
#ifdef HAVE_extendpdidi2
	  if (HAVE_extendpdidi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpdidi2 */
	  abort ();
	}
    }
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }
  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  if (flag_force_mem)
	    from = force_not_mem (from);

	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;
	  rtx tmp;
	  tree shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
				      - GET_MODE_BITSIZE (from_mode), 0);
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }
  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
	{
	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
	{
	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
	{
	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }
  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
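
/* Example of the fallback ordering in convert_move (illustrative): a
   signed QImode to SImode widening first tries an extendqisi2-style insn
   via can_extend_p, then an extension through an intermediate mode, and
   finally the shift pair above, which left-shifts the low byte to the top
   of the word and arithmetic-right-shifts it back so the sign bit is
   propagated.  */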
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && (! HARD_REGISTER_P (x)
			      || HARD_REGNO_MODE_OK (REGNO (x), mode))
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.   This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return gen_int_mode (val, mode);
	}

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
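
/* Worked example of the CONST_INT handling in convert_modes: converting
   (const_int -1) from QImode to SImode with UNSIGNEDP set masks the value
   with (1 << 8) - 1 and returns (const_int 255); with UNSIGNEDP clear the
   sign bit is propagated again and (const_int -1) is returned unchanged.  */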
/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))

/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

int
can_move_by_pieces (unsigned HOST_WIDE_INT len,
		    unsigned int align ATTRIBUTE_UNUSED)
{
  return MOVE_BY_PIECES_P (len, align);
}
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).  The caller must pass FROM
   and TO through protect_from_queue before calling.

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */

rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
		unsigned int align, int endp)
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();

  if (endp)
    {
      rtx to1;

      if (data.reverse)
	abort ();
      if (data.autinc_to)
	{
	  if (endp == 2)
	    {
	      if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
		emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
	      else
		data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
								-1));
	    }
	  to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
					   data.offset);
	}
      else
	{
	  if (endp == 2)
	    --data.offset;
	  to1 = adjust_address (data.to, QImode, data.offset);
	}
      return to1;
    }
  else
    return data.to;
}
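
/* For instance (illustrative), copying 9 bytes with 32-bit alignment on a
   target with 4-byte words takes two SImode moves plus one QImode move.
   Since that exceeds two insns, the block above first copies both
   addresses into registers and, where the target allows it, switches to
   auto-increment addressing to shorten the displacements.  */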
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align)
{
  unsigned HOST_WIDE_INT n_insns = 0;
  unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}
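
/* Worked example: with MOVE_MAX 4 and ALIGN of 32 bits,
   move_by_pieces_ninsns (7, 32) picks SImode first (7 / 4 = 1 insn,
   3 bytes left), then HImode (1 insn, 1 byte left), then QImode
   (1 insn), and returns 3.  */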
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
		  struct move_by_pieces *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->to)
	{
	  if (data->autinc_to)
	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
					     data->offset);
	  else
	    to1 = adjust_address (data->to, mode, data->offset);
	}

      if (data->autinc_from)
	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
					   data->offset);
      else
	from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
	emit_insn ((*genfun) (to1, from1));
      else
	{
#ifdef PUSH_ROUNDING
	  emit_single_push_insn (mode, from1, NULL);
#else
	  abort ();
#endif
	}

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
	 to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      abort ();
    }

  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  if (GET_MODE (x) != BLKmode)
    abort ();
  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (GET_CODE (size) == CONST_INT)
    {
      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, size);
      set_mem_size (y, size);
    }

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movstr (x, y, size, align))
    ;
  else if (may_use_call)
    retval = emit_block_move_via_libcall (x, y, size);
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}
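
/* Summary of the dispatch just above: a constant-size copy small enough
   for MOVE_BY_PIECES_P is expanded inline; otherwise a target movstr
   pattern is tried; otherwise memcpy (or bcopy) is called when METHOD
   permits a libcall; and BLOCK_OP_NO_LIBCALL falls back to the explicit
   byte-copy loop.  */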
/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
  if (PUSH_ARGS)
    return true;
  else
    {
      /* Check to see whether memcpy takes all register arguments.  */
      static enum {
	takes_regs_uninit, takes_regs_no, takes_regs_yes
      } takes_regs = takes_regs_uninit;

      switch (takes_regs)
	{
	case takes_regs_uninit:
	  {
	    CUMULATIVE_ARGS args_so_far;
	    tree fn, arg;

	    fn = emit_block_move_libcall_fn (false);
	    INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0);

	    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
	    for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
	      {
		enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
		rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
		if (!tmp || !REG_P (tmp))
		  goto fail_takes_regs;
#ifdef FUNCTION_ARG_PARTIAL_NREGS
		if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
						NULL_TREE, 1))
		  goto fail_takes_regs;
#endif
		FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
	      }
	  }
	  takes_regs = takes_regs_yes;
	  /* FALLTHRU */

	case takes_regs_yes:
	  return true;

	fail_takes_regs:
	  takes_regs = takes_regs_no;
	  /* FALLTHRU */
	case takes_regs_no:
	  return false;

	default:
	  abort ();
	}
    }
}
/* A subroutine of emit_block_move.  Expand a movstr pattern;
   return true if successful.  */

static bool
emit_block_move_via_movstr (rtx x, rtx y, rtx size, unsigned int align)
{
  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  enum machine_mode mode;

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = movstr_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
	     here because if SIZE is less than the mode mask, as it is
	     returned by the macro, it will definitely be less than the
	     actual mode mask.  */
	  && ((GET_CODE (size) == CONST_INT
	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
		   <= (GET_MODE_MASK (mode) >> 1)))
	      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
	      || (*pred) (x, BLKmode))
	  && ((pred = insn_data[(int) code].operand[1].predicate) == 0
	      || (*pred) (y, BLKmode))
	  && ((pred = insn_data[(int) code].operand[3].predicate) == 0
	      || (*pred) (opalign, VOIDmode)))
	{
	  rtx op2;
	  rtx last = get_last_insn ();
	  rtx pat;

	  op2 = convert_to_mode (mode, size, 1);
	  pred = insn_data[(int) code].operand[2].predicate;
	  if (pred != 0 && ! (*pred) (op2, mode))
	    op2 = copy_to_mode_reg (mode, op2);

	  /* ??? When called via emit_block_move_for_call, it'd be
	     nice if there were some way to inform the backend, so
	     that it doesn't fail the expansion because it thinks
	     emitting the libcall would be more efficient.  */

	  pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	  if (pat)
	    {
	      emit_insn (pat);
	      volatile_ok = 0;
	      return true;
	    }
	  else
	    delete_insns_since (last);
	}
    }

  volatile_ok = 0;
  return false;
}
/* A subroutine of emit_block_move.  Expand a call to memcpy or bcopy.
   Return the return value from memcpy, 0 otherwise.  */

static rtx
emit_block_move_via_libcall (rtx dst, rtx src, rtx size)
{
  rtx dst_addr, src_addr;
  tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* DST, SRC, or SIZE may have been passed through protect_from_queue.

     It is unsafe to save the value generated by protect_from_queue and reuse
     it later.  Consider what happens if emit_queue is called before the
     return value from protect_from_queue is used.

     Expansion of the CALL_EXPR below will call emit_queue before we are
     finished emitting RTL for argument setup.  So if we are not careful we
     could get the wrong value for an argument.

     To avoid this problem we go ahead and emit code to copy the addresses of
     DST and SRC and SIZE into new pseudos.  We can then place those new
     pseudos into an RTL_EXPR and use them later, even after a call to
     emit_queue.

     Note this is not strictly needed for library calls since they do not call
     emit_queue before loading their arguments.  However, we may need to have
     library calls call emit_queue in the future since failing to do so could
     cause problems for targets which define SMALL_REGISTER_CLASSES and pass
     arguments in registers.  */

  dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));

#ifdef POINTERS_EXTEND_UNSIGNED
  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);
#endif

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  if (TARGET_MEM_FUNCTIONS)
    size_mode = TYPE_MODE (sizetype);
  else
    size_mode = TYPE_MODE (unsigned_type_node);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.

     For convenience, we generate the call to bcopy this way as well.  */

  if (TARGET_MEM_FUNCTIONS)
    size_tree = make_tree (sizetype, size);
  else
    size_tree = make_tree (unsigned_type_node, size);

  fn = emit_block_move_libcall_fn (true);
  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
  if (TARGET_MEM_FUNCTIONS)
    {
      arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
      arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
    }
  else
    {
      arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
      arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
    }

  /* Now we have to build up the CALL_EXPR itself.  */
  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
  call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
		     call_expr, arg_list, NULL_TREE);
  TREE_SIDE_EFFECTS (call_expr) = 1;

  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);

  /* If we are initializing a readonly value, show the above call clobbered
     it. Otherwise, a load from it may erroneously be hoisted from a loop, or
     the delay slot scheduler might overlook conflicts and take nasty
     decisions.  */
  if (RTX_UNCHANGING_P (dst))
    add_function_usage_to
      (last_call_insn (), gen_rtx_EXPR_LIST (VOIDmode,
					     gen_rtx_CLOBBER (VOIDmode, dst),
					     NULL_RTX));

  return TARGET_MEM_FUNCTIONS ? retval : NULL_RTX;
}
/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_move_fn;

void
init_block_move_fn (const char *asmspec)
{
  if (!block_move_fn)
    {
      tree args, fn;

      if (TARGET_MEM_FUNCTIONS)
	{
	  fn = get_identifier ("memcpy");
	  args = build_function_type_list (ptr_type_node, ptr_type_node,
					   const_ptr_type_node, sizetype,
					   NULL_TREE);
	}
      else
	{
	  fn = get_identifier ("bcopy");
	  args = build_function_type_list (void_type_node, const_ptr_type_node,
					   ptr_type_node, unsigned_type_node,
					   NULL_TREE);
	}

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;

      block_move_fn = fn;
    }

  if (asmspec)
    {
      SET_DECL_RTL (block_move_fn, NULL_RTX);
      SET_DECL_ASSEMBLER_NAME (block_move_fn, get_identifier (asmspec));
    }
}

static tree
emit_block_move_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_move_fn)
    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_move_fn, NULL);
      assemble_external (block_move_fn);
    }

  return block_move_fn;
}
/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

static void
emit_block_move_via_loop (rtx x, rtx y, rtx size,
			  unsigned int align ATTRIBUTE_UNUSED)
{
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_note (NOTE_INSN_LOOP_BEG);

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (Pmode, iter_mode, iter, true);
  x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
  y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
			     true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_note (NOTE_INSN_LOOP_CONT);
  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
			   true, top_label);

  emit_note (NOTE_INSN_LOOP_END);
}
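
/* The RTL emitted above behaves like this C sketch:

     for (iter = 0; iter < size; iter++)
       x[iter] = y[iter];

   with the comparison placed at CMP_LABEL after the body, so a zero SIZE
   skips the copy entirely.  */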
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (int regno, rtx x, int nregs)
{
  int i;

  if (nregs == 0)
    return;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      rtx last = get_last_insn ();
      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
				    GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
	abort ();

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

rtx
gen_group_rtx (rtx orig)
{
  int i, length;
  rtx *tmps;

  if (GET_CODE (orig) != PARALLEL)
    abort ();

  length = XVECLEN (orig, 0);
  tmps = (rtx *) alloca (sizeof (rtx) * length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  if (i)
    tmps[0] = 0;

  for (; i < length; i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
    }

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}
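
/* Example of the group representation used here and below (hypothetical
   registers): a 16-byte structure returned in two DImode registers might
   be described as

     (parallel [(expr_list (reg:DI 0) (const_int 0))
		(expr_list (reg:DI 1) (const_int 8))])

   where each EXPR_LIST pairs a register with the byte offset of the piece
   it carries; gen_group_rtx replaces each register with a fresh pseudo of
   the same mode.  */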
2243 /* Emit code to move a block ORIG_SRC of type TYPE to a block DST,
2244 where DST is non-consecutive registers represented by a PARALLEL.
2245 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
2249 emit_group_load (rtx dst, rtx orig_src, tree type ATTRIBUTE_UNUSED, int ssize)
2254 if (GET_CODE (dst) != PARALLEL)
2257 /* Check for a NULL entry, used to indicate that the parameter goes
2258 both on the stack and in registers. */
2259 if (XEXP (XVECEXP (dst, 0, 0), 0))
2264 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
2266 /* Process the pieces. */
2267 for (i = start; i < XVECLEN (dst, 0); i++)
2269 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
2270 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
2271 unsigned int bytelen = GET_MODE_SIZE (mode);
2274 /* Handle trailing fragments that run over the size of the struct. */
2275 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2277 /* Arrange to shift the fragment to where it belongs.
2278 extract_bit_field loads to the lsb of the reg. */
2280 #ifdef BLOCK_REG_PADDING
2281 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
2282 == (BYTES_BIG_ENDIAN ? upward : downward)
2287 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2288 bytelen = ssize - bytepos;
2293 /* If we won't be loading directly from memory, protect the real source
2294 from strange tricks we might play; but make sure that the source can
2295 be loaded directly into the destination. */
2297 if (GET_CODE (orig_src) != MEM
2298 && (!CONSTANT_P (orig_src)
2299 || (GET_MODE (orig_src) != mode
2300 && GET_MODE (orig_src) != VOIDmode)))
2302 if (GET_MODE (orig_src) == VOIDmode)
2303 src = gen_reg_rtx (mode);
2305 src = gen_reg_rtx (GET_MODE (orig_src));
2307 emit_move_insn (src, orig_src);
2310 /* Optimize the access just a bit. */
2311 if (GET_CODE (src) == MEM
2312 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
2313 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
2314 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2315 && bytelen == GET_MODE_SIZE (mode))
2317 tmps[i] = gen_reg_rtx (mode);
2318 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
2320 else if (GET_CODE (src) == CONCAT)
2322 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
2323 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
2325 if ((bytepos == 0 && bytelen == slen0)
2326 || (bytepos != 0 && bytepos + bytelen <= slen))
2328 /* The following assumes that the concatenated objects all
2329 have the same size. In this case, a simple calculation
2330 can be used to determine the object and the bit field to be extracted.  */
2332 tmps[i] = XEXP (src, bytepos / slen0);
2333 if (! CONSTANT_P (tmps[i])
2334 && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
2335 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
2336 (bytepos % slen0) * BITS_PER_UNIT,
2337 1, NULL_RTX, mode, mode, ssize);
2339 else if (bytepos == 0)
2341 rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
2342 emit_move_insn (mem, src);
2343 tmps[i] = adjust_address (mem, mode, 0);
2348 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
2349 SIMD register, which is currently broken.  Until we get GCC
2350 to emit proper RTL for these cases, let's dump to memory. */
2351 else if (VECTOR_MODE_P (GET_MODE (dst))
2352 && GET_CODE (src) == REG)
2354 int slen = GET_MODE_SIZE (GET_MODE (src));
2357 mem = assign_stack_temp (GET_MODE (src), slen, 0);
2358 emit_move_insn (mem, src);
2359 tmps[i] = adjust_address (mem, mode, (int) bytepos);
2361 else if (CONSTANT_P (src)
2362 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2365 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2366 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2370 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2371 tmps[i], 0, OPTAB_WIDEN);
2376 /* Copy the extracted pieces into the proper (probable) hard regs. */
2377 for (i = start; i < XVECLEN (dst, 0); i++)
2378 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
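/* A hedged usage sketch, reusing the hypothetical GROUP built above:
   loading a 12-byte aggregate MEM of type TYPE into the group's
   registers is

     emit_group_load (group, mem, type, 12);

   Each (reg, offset) pair pulls one piece out of MEM, by a plain move
   when the piece is whole and sufficiently aligned, or through
   extract_bit_field plus an explicit shift for a ragged tail.  */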
2381 /* Emit code to move a block SRC to block DST, where SRC and DST are
2382 non-consecutive groups of registers, each represented by a PARALLEL. */
2385 emit_group_move (rtx dst, rtx src)
2389 if (GET_CODE (src) != PARALLEL
2390 || GET_CODE (dst) != PARALLEL
2391 || XVECLEN (src, 0) != XVECLEN (dst, 0))
2394 /* Skip first entry if NULL. */
2395 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
2396 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
2397 XEXP (XVECEXP (src, 0, i), 0));
2400 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
2401 where SRC is non-consecutive registers represented by a PARALLEL.
2402 SSIZE represents the total size of block ORIG_DST, or -1 if not known.  */
2406 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
2411 if (GET_CODE (src) != PARALLEL)
2414 /* Check for a NULL entry, used to indicate that the parameter goes
2415 both on the stack and in registers. */
2416 if (XEXP (XVECEXP (src, 0, 0), 0))
2421 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2423 /* Copy the (probable) hard regs into pseudos. */
2424 for (i = start; i < XVECLEN (src, 0); i++)
2426 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2427 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2428 emit_move_insn (tmps[i], reg);
2432 /* If we won't be storing directly into memory, protect the real destination
2433 from strange tricks we might play. */
2435 if (GET_CODE (dst) == PARALLEL)
2439 /* We can get a PARALLEL dst if there is a conditional expression in
2440 a return statement. In that case, the dst and src are the same,
2441 so no action is necessary. */
2442 if (rtx_equal_p (dst, src))
2445 /* It is unclear if we can ever reach here, but we may as well handle
2446 it.  Allocate a temporary, and split this into a store/load to/from the temporary.  */
2449 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2450 emit_group_store (temp, src, type, ssize);
2451 emit_group_load (dst, temp, type, ssize);
2454 else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
2456 dst = gen_reg_rtx (GET_MODE (orig_dst));
2457 /* Make life a bit easier for combine. */
2458 emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
2461 /* Process the pieces. */
2462 for (i = start; i < XVECLEN (src, 0); i++)
2464 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2465 enum machine_mode mode = GET_MODE (tmps[i]);
2466 unsigned int bytelen = GET_MODE_SIZE (mode);
2469 /* Handle trailing fragments that run over the size of the struct. */
2470 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2472 /* store_bit_field always takes its value from the lsb.
2473 Move the fragment to the lsb if it's not already there. */
2475 #ifdef BLOCK_REG_PADDING
2476 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2477 == (BYTES_BIG_ENDIAN ? upward : downward)
2483 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2484 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2485 tmps[i], 0, OPTAB_WIDEN);
2487 bytelen = ssize - bytepos;
2490 if (GET_CODE (dst) == CONCAT)
2492 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2493 dest = XEXP (dst, 0);
2494 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2496 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2497 dest = XEXP (dst, 1);
2499 else if (bytepos == 0 && XVECLEN (src, 0))
2501 dest = assign_stack_temp (GET_MODE (dest),
2502 GET_MODE_SIZE (GET_MODE (dest)), 0);
2503 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2512 /* Optimize the access just a bit. */
2513 if (GET_CODE (dest) == MEM
2514 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2515 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2516 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2517 && bytelen == GET_MODE_SIZE (mode))
2518 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2520 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2521 mode, tmps[i], ssize);
2526 /* Copy from the pseudo into the (probable) hard reg. */
2527 if (orig_dst != dst)
2528 emit_move_insn (orig_dst, dst);
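/* The symmetric store direction, again with the hypothetical GROUP
   from above:

     emit_group_store (mem, group, type, 12);

   Note the pieces are first copied into pseudos, so shifting a
   trailing fragment into place cannot clobber a hard register whose
   original value a later piece still needs.  */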
2531 /* Generate code to copy a BLKmode object of TYPE out of a
2532 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2533 is null, a stack temporary is created. TGTBLK is returned.
2535 The primary purpose of this routine is to handle functions
2536 that return BLKmode structures in registers. Some machines
2537 (the PA for example) want to return all small structures
2538 in registers regardless of the structure's alignment. */
2541 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2543 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2544 rtx src = NULL, dst = NULL;
2545 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2546 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2550 tgtblk = assign_temp (build_qualified_type (type,
2552 | TYPE_QUAL_CONST)),
2554 preserve_temp_slots (tgtblk);
2557 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2558 into a new pseudo which is a full word. */
2560 if (GET_MODE (srcreg) != BLKmode
2561 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2562 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2564 /* Structures whose size is not a multiple of a word are aligned
2565 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2566 machine, this means we must skip the empty high order bytes when
2567 calculating the bit offset. */
2568 if (BYTES_BIG_ENDIAN
2569 && bytes % UNITS_PER_WORD)
2570 big_endian_correction
2571 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
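  /* A worked instance of the correction above, with the word size
     assumed: on a big-endian target with 32-bit words, a 6-byte
     structure arrives right justified in 48 of 64 register bits, so
     big_endian_correction = 32 - (6 % 4) * BITS_PER_UNIT = 16.  The
     extraction then starts 16 bits into the first source word, while
     the store into TGTBLK starts at bit 0.  */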
2573 /* Copy the structure BITSIZE bits at a time.
2575 We could probably emit more efficient code for machines which do not use
2576 strict alignment, but it doesn't seem worth the effort at the current time.  */
2578 for (bitpos = 0, xbitpos = big_endian_correction;
2579 bitpos < bytes * BITS_PER_UNIT;
2580 bitpos += bitsize, xbitpos += bitsize)
2582 /* We need a new source operand each time xbitpos is on a
2583 word boundary and when xbitpos == big_endian_correction
2584 (the first time through). */
2585 if (xbitpos % BITS_PER_WORD == 0
2586 || xbitpos == big_endian_correction)
2587 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2590 /* We need a new destination operand each time bitpos is on a word boundary.  */
2592 if (bitpos % BITS_PER_WORD == 0)
2593 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2595 /* Use xbitpos for the source extraction (right justified) and
2596 bitpos for the destination store (left justified).  */
2597 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2598 extract_bit_field (src, bitsize,
2599 xbitpos % BITS_PER_WORD, 1,
2600 NULL_RTX, word_mode, word_mode,
2608 /* Add a USE expression for REG to the (possibly empty) list pointed
2609 to by CALL_FUSAGE. REG must denote a hard register. */
2612 use_reg (rtx *call_fusage, rtx reg)
2614 if (GET_CODE (reg) != REG
2615 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2619 = gen_rtx_EXPR_LIST (VOIDmode,
2620 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2623 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2624 starting at REGNO. All of these registers must be hard registers. */
2627 use_regs (rtx *call_fusage, int regno, int nregs)
2631 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2634 for (i = 0; i < nregs; i++)
2635 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2638 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2639 PARALLEL REGS. This is for calls that pass values in multiple
2640 non-contiguous locations. The Irix 6 ABI has examples of this. */
2643 use_group_regs (rtx *call_fusage, rtx regs)
2647 for (i = 0; i < XVECLEN (regs, 0); i++)
2649 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2651 /* A NULL entry means the parameter goes both on the stack and in
2652 registers. This can also be a MEM for targets that pass values
2653 partially on the stack and partially in registers. */
2654 if (reg != 0 && GET_CODE (reg) == REG)
2655 use_reg (call_fusage, reg);
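/* A minimal sketch of the calling convention for these helpers; the
   register number is hypothetical:

     rtx call_fusage = NULL_RTX;
     use_reg (&call_fusage, gen_rtx_REG (SImode, 5));

   The accumulated list becomes the CALL_INSN_FUNCTION_USAGE of the
   call insn, telling later passes that the register is read by the
   call.  */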
2660 /* Determine whether the LEN bytes generated by CONSTFUN can be
2661 stored to memory using several move instructions. CONSTFUNDATA is
2662 a pointer which will be passed as argument in every CONSTFUN call.
2663 ALIGN is maximum alignment we can assume. Return nonzero if a
2664 call to store_by_pieces should succeed. */
2667 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2668 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2669 void *constfundata, unsigned int align)
2671 unsigned HOST_WIDE_INT max_size, l;
2672 HOST_WIDE_INT offset = 0;
2673 enum machine_mode mode, tmode;
2674 enum insn_code icode;
2681 if (! STORE_BY_PIECES_P (len, align))
2684 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2685 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2686 align = MOVE_MAX * BITS_PER_UNIT;
2688 /* We would first store what we can in the largest integer mode, then go to
2689 successively smaller modes. */
2692 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2697 max_size = STORE_MAX_PIECES + 1;
2698 while (max_size > 1)
2700 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2701 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2702 if (GET_MODE_SIZE (tmode) < max_size)
2705 if (mode == VOIDmode)
2708 icode = mov_optab->handlers[(int) mode].insn_code;
2709 if (icode != CODE_FOR_nothing
2710 && align >= GET_MODE_ALIGNMENT (mode))
2712 unsigned int size = GET_MODE_SIZE (mode);
2719 cst = (*constfun) (constfundata, offset, mode);
2720 if (!LEGITIMATE_CONSTANT_P (cst))
2730 max_size = GET_MODE_SIZE (mode);
2733 /* The code above should have handled everything. */
2741 /* Generate several move instructions to store LEN bytes generated by
2742 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2743 pointer which will be passed as argument in every CONSTFUN call.
2744 ALIGN is maximum alignment we can assume.
2745 If ENDP is 0 return TO; if ENDP is 1 return memory at the end ala
2746 mempcpy; and if ENDP is 2 return memory at the end minus one byte ala stpcpy.  */
2750 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2751 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2752 void *constfundata, unsigned int align, int endp)
2754 struct store_by_pieces data;
2763 if (! STORE_BY_PIECES_P (len, align))
2765 to = protect_from_queue (to, 1);
2766 data.constfun = constfun;
2767 data.constfundata = constfundata;
2770 store_by_pieces_1 (&data, align);
2781 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2782 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2784 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2787 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2794 to1 = adjust_address (data.to, QImode, data.offset);
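/* A minimal sketch of the CONSTFUN contract, mirroring
   clear_by_pieces_1 below: the callback must return a constant rtx in
   MODE describing the bytes at OFFSET.  A zero-fill callback
   (hypothetical; the real zero case goes through clear_by_pieces) and
   its use would look like:

     static rtx
     zero_piece (void *data ATTRIBUTE_UNUSED,
                 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                 enum machine_mode mode)
     {
       return CONST0_RTX (mode);
     }

     if (can_store_by_pieces (len, zero_piece, NULL, align))
       store_by_pieces (to, len, zero_piece, NULL, align, 0);
 */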
2802 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2803 rtx with BLKmode). The caller must pass TO through protect_from_queue
2804 before calling. ALIGN is maximum alignment we can assume. */
2807 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2809 struct store_by_pieces data;
2814 data.constfun = clear_by_pieces_1;
2815 data.constfundata = NULL;
2818 store_by_pieces_1 (&data, align);
2821 /* Callback routine for clear_by_pieces.
2822 Return const0_rtx unconditionally. */
2825 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2826 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2827 enum machine_mode mode ATTRIBUTE_UNUSED)
2832 /* Subroutine of clear_by_pieces and store_by_pieces.
2833 Generate several move instructions to store LEN bytes of block TO. (A MEM
2834 rtx with BLKmode). The caller must pass TO through protect_from_queue
2835 before calling. ALIGN is maximum alignment we can assume. */
2838 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2839 unsigned int align ATTRIBUTE_UNUSED)
2841 rtx to_addr = XEXP (data->to, 0);
2842 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
2843 enum machine_mode mode = VOIDmode, tmode;
2844 enum insn_code icode;
2847 data->to_addr = to_addr;
2849 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2850 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2852 data->explicit_inc_to = 0;
2854 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2856 data->offset = data->len;
2858 /* If storing requires more than two move insns,
2859 copy addresses to registers (to make displacements shorter)
2860 and use post-increment if available. */
2861 if (!data->autinc_to
2862 && move_by_pieces_ninsns (data->len, align) > 2)
2864 /* Determine the main mode we'll be using. */
2865 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2866 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2867 if (GET_MODE_SIZE (tmode) < max_size)
2870 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2872 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2873 data->autinc_to = 1;
2874 data->explicit_inc_to = -1;
2877 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2878 && ! data->autinc_to)
2880 data->to_addr = copy_addr_to_reg (to_addr);
2881 data->autinc_to = 1;
2882 data->explicit_inc_to = 1;
2885 if (!data->autinc_to && CONSTANT_P (to_addr))
2886 data->to_addr = copy_addr_to_reg (to_addr);
2889 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2890 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2891 align = MOVE_MAX * BITS_PER_UNIT;
2893 /* First store what we can in the largest integer mode, then go to
2894 successively smaller modes. */
2896 while (max_size > 1)
2898 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2899 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2900 if (GET_MODE_SIZE (tmode) < max_size)
2903 if (mode == VOIDmode)
2906 icode = mov_optab->handlers[(int) mode].insn_code;
2907 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2908 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2910 max_size = GET_MODE_SIZE (mode);
2913 /* The code above should have handled everything. */
2918 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2919 with move instructions for mode MODE. GENFUN is the gen_... function
2920 to make a move insn for that mode. DATA has all the other info. */
2923 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2924 struct store_by_pieces *data)
2926 unsigned int size = GET_MODE_SIZE (mode);
2929 while (data->len >= size)
2932 data->offset -= size;
2934 if (data->autinc_to)
2935 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2938 to1 = adjust_address (data->to, mode, data->offset);
2940 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2941 emit_insn (gen_add2_insn (data->to_addr,
2942 GEN_INT (-(HOST_WIDE_INT) size)));
2944 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2945 emit_insn ((*genfun) (to1, cst));
2947 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2948 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2950 if (! data->reverse)
2951 data->offset += size;
2957 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2958 its length in bytes. */
2961 clear_storage (rtx object, rtx size)
2964 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2965 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2967 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2968 just move a zero. Otherwise, do this a piece at a time. */
2969 if (GET_MODE (object) != BLKmode
2970 && GET_CODE (size) == CONST_INT
2971 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
2972 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2975 object = protect_from_queue (object, 1);
2976 size = protect_from_queue (size, 0);
2978 if (GET_CODE (size) == CONST_INT && INTVAL (size) == 0)
2980 else if (GET_CODE (size) == CONST_INT
2981 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2982 clear_by_pieces (object, INTVAL (size), align);
2983 else if (clear_storage_via_clrstr (object, size, align))
2986 retval = clear_storage_via_libcall (object, size);
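/* A hedged usage sketch: zeroing a 32-byte BLKmode stack temporary.

     rtx mem = assign_stack_temp (BLKmode, 32, 0);
     clear_storage (mem, GEN_INT (32));

   Because MEM is BLKmode this takes the by-pieces, clrstr, or libcall
   path below; a non-BLKmode OBJECT whose size matches its mode would
   instead get the single CONST0_RTX move.  */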
2992 /* A subroutine of clear_storage. Expand a clrstr pattern;
2993 return true if successful. */
2996 clear_storage_via_clrstr (rtx object, rtx size, unsigned int align)
2998 /* Try the most limited insn first, because there's no point
2999 including more than one in the machine description unless
3000 the more limited one has some advantage. */
3002 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3003 enum machine_mode mode;
3005 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
3006 mode = GET_MODE_WIDER_MODE (mode))
3008 enum insn_code code = clrstr_optab[(int) mode];
3009 insn_operand_predicate_fn pred;
3011 if (code != CODE_FOR_nothing
3012 /* We don't need MODE to be narrower than
3013 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
3014 the mode mask, as it is returned by the macro, it will
3015 definitely be less than the actual mode mask. */
3016 && ((GET_CODE (size) == CONST_INT
3017 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3018 <= (GET_MODE_MASK (mode) >> 1)))
3019 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3020 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
3021 || (*pred) (object, BLKmode))
3022 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
3023 || (*pred) (opalign, VOIDmode)))
3026 rtx last = get_last_insn ();
3029 op1 = convert_to_mode (mode, size, 1);
3030 pred = insn_data[(int) code].operand[1].predicate;
3031 if (pred != 0 && ! (*pred) (op1, mode))
3032 op1 = copy_to_mode_reg (mode, op1);
3034 pat = GEN_FCN ((int) code) (object, op1, opalign);
3041 delete_insns_since (last);
3048 /* A subroutine of clear_storage. Expand a call to memset or bzero.
3049 Return the return value of memset, 0 otherwise. */
3052 clear_storage_via_libcall (rtx object, rtx size)
3054 tree call_expr, arg_list, fn, object_tree, size_tree;
3055 enum machine_mode size_mode;
3058 /* OBJECT or SIZE may have been passed through protect_from_queue.
3060 It is unsafe to save the value generated by protect_from_queue
3061 and reuse it later. Consider what happens if emit_queue is
3062 called before the return value from protect_from_queue is used.
3064 Expansion of the CALL_EXPR below will call emit_queue before
3065 we are finished emitting RTL for argument setup. So if we are
3066 not careful we could get the wrong value for an argument.
3068 To avoid this problem we go ahead and emit code to copy OBJECT
3069 and SIZE into new pseudos. We can then place those new pseudos
3070 into an RTL_EXPR and use them later, even after a call to emit_queue.
3073 Note this is not strictly needed for library calls since they
3074 do not call emit_queue before loading their arguments. However,
3075 we may need to have library calls call emit_queue in the future
3076 since failing to do so could cause problems for targets which
3077 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
3079 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
3081 if (TARGET_MEM_FUNCTIONS)
3082 size_mode = TYPE_MODE (sizetype);
3084 size_mode = TYPE_MODE (unsigned_type_node);
3085 size = convert_to_mode (size_mode, size, 1);
3086 size = copy_to_mode_reg (size_mode, size);
3088 /* It is incorrect to use the libcall calling conventions to call
3089 memset in this context. This could be a user call to memset and
3090 the user may wish to examine the return value from memset. For
3091 targets where libcalls and normal calls have different conventions
3092 for returning pointers, we could end up generating incorrect code.
3094 For convenience, we generate the call to bzero this way as well. */
3096 object_tree = make_tree (ptr_type_node, object);
3097 if (TARGET_MEM_FUNCTIONS)
3098 size_tree = make_tree (sizetype, size);
3100 size_tree = make_tree (unsigned_type_node, size);
3102 fn = clear_storage_libcall_fn (true);
3103 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
3104 if (TARGET_MEM_FUNCTIONS)
3105 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
3106 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
3108 /* Now we have to build up the CALL_EXPR itself. */
3109 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
3110 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
3111 call_expr, arg_list, NULL_TREE);
3112 TREE_SIDE_EFFECTS (call_expr) = 1;
3114 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
3116 /* If we are initializing a readonly value, show the above call
3117 clobbered it. Otherwise, a load from it may erroneously be
3118 hoisted from a loop. */
3119 if (RTX_UNCHANGING_P (object))
3120 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
3122 return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
3125 /* A subroutine of clear_storage_via_libcall. Create the tree node
3126 for the function we use for block clears. The first time FOR_CALL
3127 is true, we call assemble_external. */
3129 static GTY(()) tree block_clear_fn;
3132 init_block_clear_fn (const char *asmspec)
3134 if (!block_clear_fn)
3138 if (TARGET_MEM_FUNCTIONS)
3140 fn = get_identifier ("memset");
3141 args = build_function_type_list (ptr_type_node, ptr_type_node,
3142 integer_type_node, sizetype,
3147 fn = get_identifier ("bzero");
3148 args = build_function_type_list (void_type_node, ptr_type_node,
3149 unsigned_type_node, NULL_TREE);
3152 fn = build_decl (FUNCTION_DECL, fn, args);
3153 DECL_EXTERNAL (fn) = 1;
3154 TREE_PUBLIC (fn) = 1;
3155 DECL_ARTIFICIAL (fn) = 1;
3156 TREE_NOTHROW (fn) = 1;
3158 block_clear_fn = fn;
3163 SET_DECL_RTL (block_clear_fn, NULL_RTX);
3164 SET_DECL_ASSEMBLER_NAME (block_clear_fn, get_identifier (asmspec));
3169 clear_storage_libcall_fn (int for_call)
3171 static bool emitted_extern;
3173 if (!block_clear_fn)
3174 init_block_clear_fn (NULL);
3176 if (for_call && !emitted_extern)
3178 emitted_extern = true;
3179 make_decl_rtl (block_clear_fn, NULL);
3180 assemble_external (block_clear_fn);
3183 return block_clear_fn;
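/* In C terms the decl built above corresponds to one of the usual
   library prototypes,

     void *memset (void *, int, size_t);     when TARGET_MEM_FUNCTIONS
     void bzero (void *, unsigned int);      otherwise

   matching the argument lists assembled in clear_storage_via_libcall.  */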
3186 /* Generate code to copy Y into X.
3187 Both Y and X must have the same mode, except that
3188 Y can be a constant with VOIDmode.
3189 This mode cannot be BLKmode; use emit_block_move for that.
3191 Return the last instruction emitted. */
3194 emit_move_insn (rtx x, rtx y)
3196 enum machine_mode mode = GET_MODE (x);
3197 rtx y_cst = NULL_RTX;
3200 x = protect_from_queue (x, 1);
3201 y = protect_from_queue (y, 0);
3203 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
3206 /* Never force constant_p_rtx to memory. */
3207 if (GET_CODE (y) == CONSTANT_P_RTX)
3209 else if (CONSTANT_P (y))
3212 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3213 && (last_insn = compress_float_constant (x, y)))
3218 if (!LEGITIMATE_CONSTANT_P (y))
3220 y = force_const_mem (mode, y);
3222 /* If the target's cannot_force_const_mem prevented the spill,
3223 assume that the target's move expanders will also take care
3224 of the non-legitimate constant. */
3230 /* If X or Y are memory references, verify that their addresses are valid for the machine.  */
3232 if (GET_CODE (x) == MEM
3233 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3234 && ! push_operand (x, GET_MODE (x)))
3236 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
3237 x = validize_mem (x);
3239 if (GET_CODE (y) == MEM
3240 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3242 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
3243 y = validize_mem (y);
3245 if (mode == BLKmode)
3248 last_insn = emit_move_insn_1 (x, y);
3250 if (y_cst && GET_CODE (x) == REG
3251 && (set = single_set (last_insn)) != NULL_RTX
3252 && SET_DEST (set) == x
3253 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3254 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
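/* A minimal usage sketch: loading an integer constant into a fresh
   pseudo.

     rtx reg = gen_reg_rtx (SImode);
     emit_move_insn (reg, GEN_INT (42));

   Were the constant not LEGITIMATE_CONSTANT_P, it would first be
   spilled to the constant pool and moved from memory, with the
   original value recorded in a REG_EQUAL note as above.  */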
3259 /* Low level part of emit_move_insn.
3260 Called just like emit_move_insn, but assumes X and Y
3261 are basically valid. */
3264 emit_move_insn_1 (rtx x, rtx y)
3266 enum machine_mode mode = GET_MODE (x);
3267 enum machine_mode submode;
3268 enum mode_class class = GET_MODE_CLASS (mode);
3270 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
3273 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
3275 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
3277 /* Expand complex moves by moving real part and imag part, if possible. */
3278 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
3279 && BLKmode != (submode = GET_MODE_INNER (mode))
3280 && (mov_optab->handlers[(int) submode].insn_code
3281 != CODE_FOR_nothing))
3283 /* Don't split destination if it is a stack push. */
3284 int stack = push_operand (x, GET_MODE (x));
3286 #ifdef PUSH_ROUNDING
3287 /* In case we output to the stack, but the size is smaller than the
3288 machine can push exactly, we need to use move instructions. */
3290 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
3291 != GET_MODE_SIZE (submode)))
3294 HOST_WIDE_INT offset1, offset2;
3296 /* Do not use anti_adjust_stack, since we don't want to update
3297 stack_pointer_delta. */
3298 temp = expand_binop (Pmode,
3299 #ifdef STACK_GROWS_DOWNWARD
3307 (GET_MODE_SIZE (GET_MODE (x)))),
3308 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3310 if (temp != stack_pointer_rtx)
3311 emit_move_insn (stack_pointer_rtx, temp);
3313 #ifdef STACK_GROWS_DOWNWARD
3315 offset2 = GET_MODE_SIZE (submode);
3317 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
3318 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
3319 + GET_MODE_SIZE (submode));
3322 emit_move_insn (change_address (x, submode,
3323 gen_rtx_PLUS (Pmode,
3325 GEN_INT (offset1))),
3326 gen_realpart (submode, y));
3327 emit_move_insn (change_address (x, submode,
3328 gen_rtx_PLUS (Pmode,
3330 GEN_INT (offset2))),
3331 gen_imagpart (submode, y));
3335 /* If this is a stack, push the highpart first, so it
3336 will be in the argument order.
3338 In that case, change_address is used only to convert
3339 the mode, not to change the address. */
3342 /* Note that the real part always precedes the imag part in memory
3343 regardless of machine's endianness. */
3344 #ifdef STACK_GROWS_DOWNWARD
3345 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3346 gen_imagpart (submode, y));
3347 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3348 gen_realpart (submode, y));
3350 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3351 gen_realpart (submode, y));
3352 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3353 gen_imagpart (submode, y));
3358 rtx realpart_x, realpart_y;
3359 rtx imagpart_x, imagpart_y;
3361 /* If this is a complex value with each part being smaller than a
3362 word, the usual calling sequence will likely pack the pieces into
3363 a single register. Unfortunately, SUBREG of hard registers only
3364 deals in terms of words, so we have a problem converting input
3365 arguments to the CONCAT of two registers that is used elsewhere
3366 for complex values. If this is before reload, we can copy it into
3367 memory and reload. FIXME, we should see about using extract and
3368 insert on integer registers, but complex short and complex char
3369 variables should be rarely used. */
3370 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
3371 && (reload_in_progress | reload_completed) == 0)
3374 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
3376 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
3378 if (packed_dest_p || packed_src_p)
3380 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
3381 ? MODE_FLOAT : MODE_INT);
3383 enum machine_mode reg_mode
3384 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
3386 if (reg_mode != BLKmode)
3388 rtx mem = assign_stack_temp (reg_mode,
3389 GET_MODE_SIZE (mode), 0);
3390 rtx cmem = adjust_address (mem, mode, 0);
3393 = N_("function using short complex types cannot be inline");
3397 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
3399 emit_move_insn_1 (cmem, y);
3400 return emit_move_insn_1 (sreg, mem);
3404 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
3406 emit_move_insn_1 (mem, sreg);
3407 return emit_move_insn_1 (x, cmem);
3413 realpart_x = gen_realpart (submode, x);
3414 realpart_y = gen_realpart (submode, y);
3415 imagpart_x = gen_imagpart (submode, x);
3416 imagpart_y = gen_imagpart (submode, y);
3418 /* Show the output dies here. This is necessary for SUBREGs
3419 of pseudos since we cannot track their lifetimes correctly;
3420 hard regs shouldn't appear here except as return values.
3421 We never want to emit such a clobber after reload. */
3423 && ! (reload_in_progress || reload_completed)
3424 && (GET_CODE (realpart_x) == SUBREG
3425 || GET_CODE (imagpart_x) == SUBREG))
3426 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3428 emit_move_insn (realpart_x, realpart_y);
3429 emit_move_insn (imagpart_x, imagpart_y);
3432 return get_last_insn ();
3435 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
3436 find a mode to do it in. If we have a movcc, use it. Otherwise,
3437 find the MODE_INT mode of the same width. */
3438 else if (GET_MODE_CLASS (mode) == MODE_CC
3439 && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
3441 enum insn_code insn_code;
3442 enum machine_mode tmode = VOIDmode;
3446 && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
3449 for (tmode = QImode; tmode != VOIDmode;
3450 tmode = GET_MODE_WIDER_MODE (tmode))
3451 if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
3454 if (tmode == VOIDmode)
3457 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
3458 may call change_address which is not appropriate if we were
3459 called when a reload was in progress. We don't have to worry
3460 about changing the address since the size in bytes is supposed to
3461 be the same. Copy the MEM to change the mode and move any
3462 substitutions from the old MEM to the new one. */
3464 if (reload_in_progress)
3466 x = gen_lowpart_common (tmode, x1);
3467 if (x == 0 && GET_CODE (x1) == MEM)
3469 x = adjust_address_nv (x1, tmode, 0);
3470 copy_replacements (x1, x);
3473 y = gen_lowpart_common (tmode, y1);
3474 if (y == 0 && GET_CODE (y1) == MEM)
3476 y = adjust_address_nv (y1, tmode, 0);
3477 copy_replacements (y1, y);
3482 x = gen_lowpart (tmode, x);
3483 y = gen_lowpart (tmode, y);
3486 insn_code = mov_optab->handlers[(int) tmode].insn_code;
3487 return emit_insn (GEN_FCN (insn_code) (x, y));
3490 /* This will handle any multi-word or full-word mode that lacks a move_insn
3491 pattern. However, you will get better code if you define such patterns,
3492 even if they must turn into multiple assembler instructions. */
3493 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
3500 #ifdef PUSH_ROUNDING
3502 /* If X is a push on the stack, do the push now and replace
3503 X with a reference to the stack pointer. */
3504 if (push_operand (x, GET_MODE (x)))
3509 /* Do not use anti_adjust_stack, since we don't want to update
3510 stack_pointer_delta. */
3511 temp = expand_binop (Pmode,
3512 #ifdef STACK_GROWS_DOWNWARD
3520 (GET_MODE_SIZE (GET_MODE (x)))),
3521 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3523 if (temp != stack_pointer_rtx)
3524 emit_move_insn (stack_pointer_rtx, temp);
3526 code = GET_CODE (XEXP (x, 0));
3528 /* Just hope that small offsets off SP are OK. */
3529 if (code == POST_INC)
3530 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3531 GEN_INT (-((HOST_WIDE_INT)
3532 GET_MODE_SIZE (GET_MODE (x)))));
3533 else if (code == POST_DEC)
3534 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3535 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3537 temp = stack_pointer_rtx;
3539 x = change_address (x, VOIDmode, temp);
3543 /* If we are in reload, see if either operand is a MEM whose address
3544 is scheduled for replacement. */
3545 if (reload_in_progress && GET_CODE (x) == MEM
3546 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3547 x = replace_equiv_address_nv (x, inner);
3548 if (reload_in_progress && GET_CODE (y) == MEM
3549 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3550 y = replace_equiv_address_nv (y, inner);
3556 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3559 rtx xpart = operand_subword (x, i, 1, mode);
3560 rtx ypart = operand_subword (y, i, 1, mode);
3562 /* If we can't get a part of Y, put Y into memory if it is a
3563 constant. Otherwise, force it into a register. If we still
3564 can't get a part of Y, abort. */
3565 if (ypart == 0 && CONSTANT_P (y))
3567 y = force_const_mem (mode, y);
3568 ypart = operand_subword (y, i, 1, mode);
3570 else if (ypart == 0)
3571 ypart = operand_subword_force (y, i, mode);
3573 if (xpart == 0 || ypart == 0)
3576 need_clobber |= (GET_CODE (xpart) == SUBREG);
3578 last_insn = emit_move_insn (xpart, ypart);
3584 /* Show the output dies here. This is necessary for SUBREGs
3585 of pseudos since we cannot track their lifetimes correctly;
3586 hard regs shouldn't appear here except as return values.
3587 We never want to emit such a clobber after reload. */
3589 && ! (reload_in_progress || reload_completed)
3590 && need_clobber != 0)
3591 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
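  /* A worked instance of the word-by-word fallback, sizes assumed: a
     DImode move on a 32-bit target with no movdi pattern decomposes
     into two SImode moves,

       (set (subreg:SI (reg:DI x) 0) (subreg:SI (reg:DI y) 0))
       (set (subreg:SI (reg:DI x) 4) (subreg:SI (reg:DI y) 4))

     preceded by (clobber (reg:DI x)) so flow analysis does not treat
     the old value of X as flowing into the partial stores.  */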
3601 /* If Y is representable exactly in a narrower mode, and the target can
3602 perform the extension directly from constant or memory, then emit the
3603 move as an extension. */
3606 compress_float_constant (rtx x, rtx y)
3608 enum machine_mode dstmode = GET_MODE (x);
3609 enum machine_mode orig_srcmode = GET_MODE (y);
3610 enum machine_mode srcmode;
3613 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3615 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3616 srcmode != orig_srcmode;
3617 srcmode = GET_MODE_WIDER_MODE (srcmode))
3620 rtx trunc_y, last_insn;
3622 /* Skip if the target can't extend this way. */
3623 ic = can_extend_p (dstmode, srcmode, 0);
3624 if (ic == CODE_FOR_nothing)
3627 /* Skip if the narrowed value isn't exact. */
3628 if (! exact_real_truncate (srcmode, &r))
3631 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3633 if (LEGITIMATE_CONSTANT_P (trunc_y))
3635 /* Skip if the target needs extra instructions to perform the extension.  */
3637 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3640 else if (float_extend_from_mem[dstmode][srcmode])
3641 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3645 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3646 last_insn = get_last_insn ();
3648 if (GET_CODE (x) == REG)
3649 set_unique_reg_note (last_insn, REG_EQUAL, y);
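/* Worked example, target capabilities assumed: storing 1.0 to a
   DFmode register on a machine with extendsfdf2.  1.0 truncates to
   SFmode exactly, so the move is emitted as a 32-bit constant-pool
   load extended in the register,

     (set (reg:DF d) (float_extend:DF (mem:SF <pool>)))

   whereas 0.1, whose DFmode value is not exactly representable in
   SFmode, fails exact_real_truncate and stays full width.  */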
3657 /* Pushing data onto the stack. */
3659 /* Push a block of length SIZE (perhaps variable)
3660 and return an rtx to address the beginning of the block.
3661 Note that it is not possible for the value returned to be a QUEUED.
3662 The value may be virtual_outgoing_args_rtx.
3664 EXTRA is the number of bytes of padding to push in addition to SIZE.
3665 BELOW nonzero means this padding comes at low addresses;
3666 otherwise, the padding comes at high addresses. */
3669 push_block (rtx size, int extra, int below)
3673 size = convert_modes (Pmode, ptr_mode, size, 1);
3674 if (CONSTANT_P (size))
3675 anti_adjust_stack (plus_constant (size, extra));
3676 else if (GET_CODE (size) == REG && extra == 0)
3677 anti_adjust_stack (size);
3680 temp = copy_to_mode_reg (Pmode, size);
3682 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3683 temp, 0, OPTAB_LIB_WIDEN);
3684 anti_adjust_stack (temp);
3687 #ifndef STACK_GROWS_DOWNWARD
3693 temp = virtual_outgoing_args_rtx;
3694 if (extra != 0 && below)
3695 temp = plus_constant (temp, extra);
3699 if (GET_CODE (size) == CONST_INT)
3700 temp = plus_constant (virtual_outgoing_args_rtx,
3701 -INTVAL (size) - (below ? 0 : extra));
3702 else if (extra != 0 && !below)
3703 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3704 negate_rtx (Pmode, plus_constant (size, extra)));
3706 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3707 negate_rtx (Pmode, size));
3710 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
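/* A hedged usage sketch: reserving 64 bytes of outgoing argument
   space and wrapping the result in a MEM.

     rtx addr = push_block (GEN_INT (64), 0, 0);
     rtx blk = gen_rtx_MEM (BLKmode, addr);

   The stack adjustment is emitted here; the returned address names
   the start of the block relative to virtual_outgoing_args_rtx.  */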
3713 #ifdef PUSH_ROUNDING
3715 /* Emit single push insn. */
3718 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3721 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3723 enum insn_code icode;
3724 insn_operand_predicate_fn pred;
3726 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3727 /* If there is a push pattern, use it.  Otherwise try the old way of throwing
3728 a MEM representing the push operation to the move expander.  */
3729 icode = push_optab->handlers[(int) mode].insn_code;
3730 if (icode != CODE_FOR_nothing)
3732 if (((pred = insn_data[(int) icode].operand[0].predicate)
3733 && !((*pred) (x, mode))))
3734 x = force_reg (mode, x);
3735 emit_insn (GEN_FCN (icode) (x));
3738 if (GET_MODE_SIZE (mode) == rounded_size)
3739 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3740 /* If we are to pad downward, adjust the stack pointer first and
3741 then store X into the stack location using an offset. This is
3742 because emit_move_insn does not know how to pad; it does not have access to type.  */
3744 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3746 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3747 HOST_WIDE_INT offset;
3749 emit_move_insn (stack_pointer_rtx,
3750 expand_binop (Pmode,
3751 #ifdef STACK_GROWS_DOWNWARD
3757 GEN_INT (rounded_size),
3758 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3760 offset = (HOST_WIDE_INT) padding_size;
3761 #ifdef STACK_GROWS_DOWNWARD
3762 if (STACK_PUSH_CODE == POST_DEC)
3763 /* We have already decremented the stack pointer, so get the previous value.  */
3765 offset += (HOST_WIDE_INT) rounded_size;
3767 if (STACK_PUSH_CODE == POST_INC)
3768 /* We have already incremented the stack pointer, so get the previous value.  */
3770 offset -= (HOST_WIDE_INT) rounded_size;
3772 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3776 #ifdef STACK_GROWS_DOWNWARD
3777 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3778 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3779 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3781 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3782 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3783 GEN_INT (rounded_size));
3785 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3788 dest = gen_rtx_MEM (mode, dest_addr);
3792 set_mem_attributes (dest, type, 1);
3794 if (flag_optimize_sibling_calls)
3795 /* Function incoming arguments may overlap with sibling call
3796 outgoing arguments and we cannot allow reordering of reads
3797 from function arguments with stores to outgoing arguments
3798 of sibling calls. */
3799 set_mem_alias_set (dest, 0);
3801 emit_move_insn (dest, x);
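/* A worked instance of the downward-padding path above, numbers
   assumed: pushing a 2-byte HImode value where PUSH_ROUNDING rounds
   to 4 gives rounded_size = 4 and padding_size = 2.  The stack
   pointer is first moved by the full 4 bytes; with STACK_PUSH_CODE
   == PRE_DEC the value is then stored at sp + 2, leaving the two
   bytes of padding at the lower addresses, as `downward' requires.  */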
3805 /* Generate code to push X onto the stack, assuming it has mode MODE and
3807 MODE is redundant except when X is a CONST_INT (since they don't carry mode info).
3809 SIZE is an rtx for the size of data to be copied (in bytes),
3810 needed only if X is BLKmode.
3812 ALIGN (in bits) is maximum alignment we can assume.
3814 If PARTIAL and REG are both nonzero, then copy that many of the first
3815 words of X into registers starting with REG, and push the rest of X.
3816 The amount of space pushed is decreased by PARTIAL words,
3817 rounded *down* to a multiple of PARM_BOUNDARY.
3818 REG must be a hard register in this case.
3819 If REG is zero but PARTIAL is not, take all other actions for an
3820 argument partially in registers, but do not actually load any registers.
3823 EXTRA is the amount in bytes of extra space to leave next to this arg.
3824 This is ignored if an argument block has already been allocated.
3826 On a machine that lacks real push insns, ARGS_ADDR is the address of
3827 the bottom of the argument block for this call. We use indexing off there
3828 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3829 argument block has not been preallocated.
3831 ARGS_SO_FAR is the size of args previously pushed for this call.
3833 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3834 for arguments passed in registers. If nonzero, it will be the number
3835 of bytes required. */
3838 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3839 unsigned int align, int partial, rtx reg, int extra,
3840 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3844 enum direction stack_direction
3845 #ifdef STACK_GROWS_DOWNWARD
3851 /* Decide where to pad the argument: `downward' for below,
3852 `upward' for above, or `none' for don't pad it.
3853 Default is below for small data on big-endian machines; else above. */
3854 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3856 /* Invert direction if stack is post-decrement.
3858 if (STACK_PUSH_CODE == POST_DEC)
3859 if (where_pad != none)
3860 where_pad = (where_pad == downward ? upward : downward);
3862 xinner = x = protect_from_queue (x, 0);
3864 if (mode == BLKmode)
3866 /* Copy a block into the stack, entirely or partially. */
3869 int used = partial * UNITS_PER_WORD;
3870 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3878 /* USED is now the # of bytes we need not copy to the stack
3879 because registers will take care of them. */
3882 xinner = adjust_address (xinner, BLKmode, used);
3884 /* If the partial register-part of the arg counts in its stack size,
3885 skip the part of stack space corresponding to the registers.
3886 Otherwise, start copying to the beginning of the stack space,
3887 by setting SKIP to 0. */
3888 skip = (reg_parm_stack_space == 0) ? 0 : used;
3890 #ifdef PUSH_ROUNDING
3891 /* Do it with several push insns if that doesn't take lots of insns
3892 and if there is no difficulty with push insns that skip bytes
3893 on the stack for alignment purposes. */
3896 && GET_CODE (size) == CONST_INT
3898 && MEM_ALIGN (xinner) >= align
3899 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3900 /* Here we avoid the case of a structure whose weak alignment
3901 forces many pushes of a small amount of data,
3902 and such small pushes do rounding that causes trouble. */
3903 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3904 || align >= BIGGEST_ALIGNMENT
3905 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3906 == (align / BITS_PER_UNIT)))
3907 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3909 /* Push padding now if padding above and stack grows down,
3910 or if padding below and stack grows up.
3911 But if space already allocated, this has already been done. */
3912 if (extra && args_addr == 0
3913 && where_pad != none && where_pad != stack_direction)
3914 anti_adjust_stack (GEN_INT (extra));
3916 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3919 #endif /* PUSH_ROUNDING */
3923 /* Otherwise make space on the stack and copy the data
3924 to the address of that space. */
3926 /* Deduct words put into registers from the size we must copy. */
3929 if (GET_CODE (size) == CONST_INT)
3930 size = GEN_INT (INTVAL (size) - used);
3932 size = expand_binop (GET_MODE (size), sub_optab, size,
3933 GEN_INT (used), NULL_RTX, 0,
3937 /* Get the address of the stack space.
3938 In this case, we do not deal with EXTRA separately.
3939 A single stack adjust will do. */
3942 temp = push_block (size, extra, where_pad == downward);
3945 else if (GET_CODE (args_so_far) == CONST_INT)
3946 temp = memory_address (BLKmode,
3947 plus_constant (args_addr,
3948 skip + INTVAL (args_so_far)));
3950 temp = memory_address (BLKmode,
3951 plus_constant (gen_rtx_PLUS (Pmode,
3956 if (!ACCUMULATE_OUTGOING_ARGS)
3958 /* If the source is referenced relative to the stack pointer,
3959 copy it to another register to stabilize it. We do not need
3960 to do this if we know that we won't be changing sp. */
3962 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3963 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3964 temp = copy_to_reg (temp);
3967 target = gen_rtx_MEM (BLKmode, temp);
3971 set_mem_attributes (target, type, 1);
3972 /* Function incoming arguments may overlap with sibling call
3973 outgoing arguments and we cannot allow reordering of reads
3974 from function arguments with stores to outgoing arguments
3975 of sibling calls. */
3976 set_mem_alias_set (target, 0);
3979 /* ALIGN may well be better aligned than TYPE, e.g. due to
3980 PARM_BOUNDARY. Assume the caller isn't lying. */
3981 set_mem_align (target, align);
3983 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3986 else if (partial > 0)
3988 /* Scalar partly in registers. */
3990 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3993 /* # words of start of argument
3994 that we must make space for but need not store. */
3995 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3996 int args_offset = INTVAL (args_so_far);
3999 /* Push padding now if padding above and stack grows down,
4000 or if padding below and stack grows up.
4001 But if space already allocated, this has already been done. */
4002 if (extra && args_addr == 0
4003 && where_pad != none && where_pad != stack_direction)
4004 anti_adjust_stack (GEN_INT (extra));
4006 /* If we make space by pushing it, we might as well push
4007 the real data. Otherwise, we can leave OFFSET nonzero
4008 and leave the space uninitialized. */
4012 /* Now NOT_STACK gets the number of words that we don't need to
4013 allocate on the stack. */
4014 not_stack = partial - offset;
4016 /* If the partial register-part of the arg counts in its stack size,
4017 skip the part of stack space corresponding to the registers.
4018 Otherwise, start copying to the beginning of the stack space,
4019 by setting SKIP to 0. */
4020 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
4022 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
4023 x = validize_mem (force_const_mem (mode, x));
4025 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
4026 SUBREGs of such registers are not allowed. */
4027 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
4028 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
4029 x = copy_to_reg (x);
4031 /* Loop over all the words allocated on the stack for this arg. */
4032 /* We can do it by words, because any scalar bigger than a word
4033 has a size a multiple of a word. */
4034 #ifndef PUSH_ARGS_REVERSED
4035 for (i = not_stack; i < size; i++)
4037 for (i = size - 1; i >= not_stack; i--)
4039 if (i >= not_stack + offset)
4040 emit_push_insn (operand_subword_force (x, i, mode),
4041 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
4043 GEN_INT (args_offset + ((i - not_stack + skip)
4045 reg_parm_stack_space, alignment_pad);
4052 /* Push padding now if padding above and stack grows down,
4053 or if padding below and stack grows up.
4054 But if space already allocated, this has already been done. */
4055 if (extra && args_addr == 0
4056 && where_pad != none && where_pad != stack_direction)
4057 anti_adjust_stack (GEN_INT (extra));
4059 #ifdef PUSH_ROUNDING
4060 if (args_addr == 0 && PUSH_ARGS)
4061 emit_single_push_insn (mode, x, type);
4065 if (GET_CODE (args_so_far) == CONST_INT)
4067 = memory_address (mode,
4068 plus_constant (args_addr,
4069 INTVAL (args_so_far)));
4071 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
4073 dest = gen_rtx_MEM (mode, addr);
4076 set_mem_attributes (dest, type, 1);
4077 /* Function incoming arguments may overlap with sibling call
4078 outgoing arguments and we cannot allow reordering of reads
4079 from function arguments with stores to outgoing arguments
4080 of sibling calls. */
4081 set_mem_alias_set (dest, 0);
4084 emit_move_insn (dest, x);
4088 /* If part should go in registers, copy that part
4089 into the appropriate registers. Do this now, at the end,
4090 since mem-to-mem copies above may do function calls. */
4091 if (partial > 0 && reg != 0)
4093 /* Handle calls that pass values in multiple non-contiguous locations.
4094 The Irix 6 ABI has examples of this. */
4095 if (GET_CODE (reg) == PARALLEL)
4096 emit_group_load (reg, x, type, -1);
4098 move_block_to_reg (REGNO (reg), x, partial, mode);
4101 if (extra && args_addr == 0 && where_pad == stack_direction)
4102 anti_adjust_stack (GEN_INT (extra));
4104 if (alignment_pad && args_addr == 0)
4105 anti_adjust_stack (alignment_pad);
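/* A hedged sketch of the simplest call, every argument hypothetical:
   pushing a word-sized scalar with no partial registers and no
   preallocated argument block,

     emit_push_insn (x, SImode, integer_type_node, NULL_RTX,
                     PARM_BOUNDARY, 0, NULL_RTX, 0, NULL_RTX,
                     GEN_INT (0), 0, NULL_RTX);

   which, on a target with push insns, reduces to the
   emit_single_push_insn call in the scalar arm above.  */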
4108 /* Return X if X can be used as a subtarget in a sequence of arithmetic operations.  */
4112 get_subtarget (rtx x)
4115 /* Only registers can be subtargets. */
4116 || GET_CODE (x) != REG
4117 /* If the register is readonly, it can't be set more than once. */
4118 || RTX_UNCHANGING_P (x)
4119 /* Don't use hard regs to avoid extending their life. */
4120 || REGNO (x) < FIRST_PSEUDO_REGISTER
4121 /* Avoid subtargets inside loops,
4122 since they hide some invariant expressions. */
4123 || preserve_subexpressions_p ())
4127 /* Expand an assignment that stores the value of FROM into TO.
4128 If WANT_VALUE is nonzero, return an rtx for the value of TO.
4129 (This may contain a QUEUED rtx;
4130 if the value is constant, this rtx is a constant.)
4131 Otherwise, the returned value is NULL_RTX. */
4134 expand_assignment (tree to, tree from, int want_value)
4139 /* Don't crash if the lhs of the assignment was erroneous. */
4141 if (TREE_CODE (to) == ERROR_MARK)
4143 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
4144 return want_value ? result : NULL_RTX;
4147 /* Assignment of a structure component needs special treatment
4148 if the structure component's rtx is not simply a MEM.
4149 Assignment of an array element at a constant index, and assignment of
4150 an array element in an unaligned packed structure field, has the same problem.  */
4153 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
4154 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
4155 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4157 enum machine_mode mode1;
4158 HOST_WIDE_INT bitsize, bitpos;
4166 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4167 &unsignedp, &volatilep);
4169 /* If we are going to use store_bit_field and extract_bit_field,
4170 make sure to_rtx will be safe for multiple use. */
4172 if (mode1 == VOIDmode && want_value)
4173 tem = stabilize_reference (tem);
4175 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
4179 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4181 if (GET_CODE (to_rtx) != MEM)
4184 #ifdef POINTERS_EXTEND_UNSIGNED
4185 if (GET_MODE (offset_rtx) != Pmode)
4186 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4188 if (GET_MODE (offset_rtx) != ptr_mode)
4189 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4192 /* A constant address in TO_RTX can have VOIDmode; we must not try
4193 to call force_reg for that case. Avoid that case. */
4194 if (GET_CODE (to_rtx) == MEM
4195 && GET_MODE (to_rtx) == BLKmode
4196 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4198 && (bitpos % bitsize) == 0
4199 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4200 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4202 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4206 to_rtx = offset_address (to_rtx, offset_rtx,
4207 highest_pow2_factor_for_type (TREE_TYPE (to),
4211 if (GET_CODE (to_rtx) == MEM)
4213 /* If the field is at offset zero, we could have been given the
4214 DECL_RTX of the parent struct. Don't munge it. */
4215 to_rtx = shallow_copy_rtx (to_rtx);
4217 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4220 /* Deal with volatile and readonly fields. The former is only done
4221 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4222 if (volatilep && GET_CODE (to_rtx) == MEM)
4224 if (to_rtx == orig_to_rtx)
4225 to_rtx = copy_rtx (to_rtx);
4226 MEM_VOLATILE_P (to_rtx) = 1;
4229 if (TREE_CODE (to) == COMPONENT_REF
4230 && TREE_READONLY (TREE_OPERAND (to, 1)))
4232 if (to_rtx == orig_to_rtx)
4233 to_rtx = copy_rtx (to_rtx);
4234 RTX_UNCHANGING_P (to_rtx) = 1;
4237 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
4239 if (to_rtx == orig_to_rtx)
4240 to_rtx = copy_rtx (to_rtx);
4241 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4244 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4246 /* Spurious cast for HPUX compiler. */
4247 ? ((enum machine_mode)
4248 TYPE_MODE (TREE_TYPE (to)))
4250 unsignedp, TREE_TYPE (tem), get_alias_set (to));
4252 preserve_temp_slots (result);
4256 /* If the value is meaningful, convert RESULT to the proper mode.
4257 Otherwise, return nothing. */
4258 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
4259 TYPE_MODE (TREE_TYPE (from)),
4261 TREE_UNSIGNED (TREE_TYPE (to)))
4265 /* If the rhs is a function call and its value is not an aggregate,
4266 call the function before we start to compute the lhs.
4267 This is needed for correct code for cases such as
4268 val = setjmp (buf) on machines where reference to val
4269 requires loading up part of an address in a separate insn.
4271 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4272 since it might be a promoted variable where the zero- or sign-extension
4273 needs to be done. Handling this in the normal way is safe because no
4274 computation is done before the call. */
4275 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
4276 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4277 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4278 && GET_CODE (DECL_RTL (to)) == REG))
4283 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
4285 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4287 /* Handle calls that return values in multiple non-contiguous locations.
4288 The Irix 6 ABI has examples of this. */
4289 if (GET_CODE (to_rtx) == PARALLEL)
4290 emit_group_load (to_rtx, value, TREE_TYPE (from),
4291 int_size_in_bytes (TREE_TYPE (from)));
4292 else if (GET_MODE (to_rtx) == BLKmode)
4293 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4296 #ifdef POINTERS_EXTEND_UNSIGNED
4297 if (POINTER_TYPE_P (TREE_TYPE (to))
4298 && GET_MODE (to_rtx) != GET_MODE (value))
4299 value = convert_memory_address (GET_MODE (to_rtx), value);
4301 emit_move_insn (to_rtx, value);
4303 preserve_temp_slots (to_rtx);
4306 return want_value ? to_rtx : NULL_RTX;
4309 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4310 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4313 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4315 /* Don't move directly into a return register. */
4316 if (TREE_CODE (to) == RESULT_DECL
4317 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
4322 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
4324 if (GET_CODE (to_rtx) == PARALLEL)
4325 emit_group_load (to_rtx, temp, TREE_TYPE (from),
4326 int_size_in_bytes (TREE_TYPE (from)));
4328 emit_move_insn (to_rtx, temp);
4330 preserve_temp_slots (to_rtx);
4333 return want_value ? to_rtx : NULL_RTX;
4336 /* In case we are returning the contents of an object which overlaps
4337 the place the value is being stored, use a safe function when copying
4338 a value through a pointer into a structure value return block. */
4339 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4340 && current_function_returns_struct
4341 && !current_function_returns_pcc_struct)
4346 size = expr_size (from);
4347 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
4349 if (TARGET_MEM_FUNCTIONS)
4350 emit_library_call (memmove_libfunc, LCT_NORMAL,
4351 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4352 XEXP (from_rtx, 0), Pmode,
4353 convert_to_mode (TYPE_MODE (sizetype),
4354 size, TREE_UNSIGNED (sizetype)),
4355 TYPE_MODE (sizetype));
4357 emit_library_call (bcopy_libfunc, LCT_NORMAL,
4358 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
4359 XEXP (to_rtx, 0), Pmode,
4360 convert_to_mode (TYPE_MODE (integer_type_node),
4362 TREE_UNSIGNED (integer_type_node)),
4363 TYPE_MODE (integer_type_node));
4365 preserve_temp_slots (to_rtx);
4368 return want_value ? to_rtx : NULL_RTX;
4371 /* Compute FROM and store the value in the rtx we got. */
4374 result = store_expr (from, to_rtx, want_value);
4375 preserve_temp_slots (result);
4378 return want_value ? result : NULL_RTX;
4381 /* Generate code for computing expression EXP,
4382 and storing the value into TARGET.
4383 TARGET may contain a QUEUED rtx.
4385 If WANT_VALUE & 1 is nonzero, return a copy of the value
4386 not in TARGET, so that we can be sure to use the proper
4387 value in a containing expression even if TARGET has something
4388 else stored in it. If possible, we copy the value through a pseudo
4389 and return that pseudo. Or, if the value is constant, we try to
4390 return the constant. In some cases, we return a pseudo
4391 copied *from* TARGET.
4393 If the mode is BLKmode then we may return TARGET itself.
4394 It turns out that in BLKmode it doesn't cause a problem,
4395 because C has no operators that could combine two different
4396 assignments into the same BLKmode object with different values
4397 with no sequence point. Will other languages need this to
4398 be more careful as well?
4400 If WANT_VALUE & 1 is 0, we return NULL, to make sure
4401 to catch quickly any cases where the caller uses the value
4402 and fails to set WANT_VALUE.
4404 If WANT_VALUE & 2 is set, this is a store into a call param on the
4405 stack, and block moves may need to be treated specially. */
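/* A hedged usage sketch (these call sites are illustrative additions,
   not from the original source):

     rtx v = store_expr (rhs, target, 1);   - store and hand back the value
     store_expr (rhs, target, 2);           - store into a stack call param

   Bit 0 of WANT_VALUE requests the stored value; bit 1 (mask 2) marks a
   store into an outgoing argument slot, which makes the block-move paths
   below pick BLOCK_OP_CALL_PARM instead of BLOCK_OP_NORMAL.  */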
4408 store_expr (tree exp, rtx target, int want_value)
4411 int dont_return_target = 0;
4412 int dont_store_target = 0;
4414 if (VOID_TYPE_P (TREE_TYPE (exp)))
4416 /* C++ can generate ?: expressions with a throw expression in one
4417 branch and an rvalue in the other. Here, we resolve attempts to
4418 store the throw expression's nonexistent result. */
4421 expand_expr (exp, const0_rtx, VOIDmode, 0);
4424 if (TREE_CODE (exp) == COMPOUND_EXPR)
4426 /* Perform first part of compound expression, then assign from second
4427 part. */
4428 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4429 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4431 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4433 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4435 /* For a conditional expression, get a safe form of the target. Then
4436 test the condition, doing the appropriate assignment on either
4437 side. This avoids the creation of unnecessary temporaries.
4438 For non-BLKmode, it is more efficient not to do this. */
4440 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4443 target = protect_from_queue (target, 1);
4445 do_pending_stack_adjust ();
4447 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4448 start_cleanup_deferral ();
4449 store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
4450 end_cleanup_deferral ();
4452 emit_jump_insn (gen_jump (lab2));
4455 start_cleanup_deferral ();
4456 store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
4457 end_cleanup_deferral ();
4462 return want_value & 1 ? target : NULL_RTX;
4464 else if (queued_subexp_p (target))
4465 /* If target contains a postincrement, let's not risk
4466 using it as the place to generate the rhs. */
4468 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4470 /* Expand EXP into a new pseudo. */
4471 temp = gen_reg_rtx (GET_MODE (target));
4472 temp = expand_expr (exp, temp, GET_MODE (target),
4473 (want_value & 2
4474 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4477 temp = expand_expr (exp, NULL_RTX, GET_MODE (target),
4478 (want_value & 2
4479 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4481 /* If target is volatile, ANSI requires accessing the value
4482 *from* the target, if it is accessed. So make that happen.
4483 In no case return the target itself. */
4484 if (! MEM_VOLATILE_P (target) && (want_value & 1) != 0)
4485 dont_return_target = 1;
4487 else if ((want_value & 1) != 0
4488 && GET_CODE (target) == MEM
4489 && ! MEM_VOLATILE_P (target)
4490 && GET_MODE (target) != BLKmode)
4491 /* If target is in memory and caller wants value in a register instead,
4492 arrange that. Pass TARGET as target for expand_expr so that,
4493 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4494 We know expand_expr will not use the target in that case.
4495 Don't do this if TARGET is volatile because we are supposed
4496 to write it and then read it. */
4498 temp = expand_expr (exp, target, GET_MODE (target),
4499 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4500 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4502 /* If TEMP is already in the desired TARGET, only copy it from
4503 memory and don't store it there again. */
4505 || (rtx_equal_p (temp, target)
4506 && ! side_effects_p (temp) && ! side_effects_p (target)))
4507 dont_store_target = 1;
4508 temp = copy_to_reg (temp);
4510 dont_return_target = 1;
4512 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4513 /* If this is a scalar in a register that is stored in a wider mode
4514 than the declared mode, compute the result into its declared mode
4515 and then convert to the wider mode. Our value is the computed
4516 expression. */
4518 rtx inner_target = 0;
4520 /* If we don't want a value, we can do the conversion inside EXP,
4521 which will often result in some optimizations. Do the conversion
4522 in two steps: first change the signedness, if needed, then
4523 the extend. But don't do this if the type of EXP is a subtype
4524 of something else since then the conversion might involve
4525 more than just converting modes. */
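/* Illustrative case (an assumption about a typical promoting target):
   a signed char variable kept in an SImode pseudo appears here as
   (subreg:QI (reg:SI N) 0) with SUBREG_PROMOTED_VAR_P set.  Rewriting
   EXP to the promoted type lets expand_expr compute directly in SImode,
   saving a separate sign- or zero-extension afterwards.  */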
4526 if ((want_value & 1) == 0
4527 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4528 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4530 if (TREE_UNSIGNED (TREE_TYPE (exp))
4531 != SUBREG_PROMOTED_UNSIGNED_P (target))
4532 exp = convert
4533 ((*lang_hooks.types.signed_or_unsigned_type)
4534 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4536 exp = convert ((*lang_hooks.types.type_for_mode)
4537 (GET_MODE (SUBREG_REG (target)),
4538 SUBREG_PROMOTED_UNSIGNED_P (target)),
4541 inner_target = SUBREG_REG (target);
4544 temp = expand_expr (exp, inner_target, VOIDmode,
4545 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4547 /* If TEMP is a MEM and we want a result value, make the access
4548 now so it gets done only once. Strictly speaking, this is
4549 only necessary if the MEM is volatile, or if the address
4550 overlaps TARGET. But not performing the load twice also
4551 reduces the amount of rtl we generate and then have to CSE. */
4552 if (GET_CODE (temp) == MEM && (want_value & 1) != 0)
4553 temp = copy_to_reg (temp);
4555 /* If TEMP is a VOIDmode constant, use convert_modes to make
4556 sure that we properly convert it. */
4557 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4559 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4560 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4561 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4562 GET_MODE (target), temp,
4563 SUBREG_PROMOTED_UNSIGNED_P (target));
4566 convert_move (SUBREG_REG (target), temp,
4567 SUBREG_PROMOTED_UNSIGNED_P (target));
4569 /* If we promoted a constant, change the mode back down to match
4570 target. Otherwise, the caller might get confused by a result whose
4571 mode is larger than expected. */
4573 if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
4575 if (GET_MODE (temp) != VOIDmode)
4577 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4578 SUBREG_PROMOTED_VAR_P (temp) = 1;
4579 SUBREG_PROMOTED_UNSIGNED_SET (temp,
4580 SUBREG_PROMOTED_UNSIGNED_P (target));
4583 temp = convert_modes (GET_MODE (target),
4584 GET_MODE (SUBREG_REG (target)),
4585 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4588 return want_value & 1 ? temp : NULL_RTX;
4592 temp = expand_expr (exp, target, GET_MODE (target),
4593 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4594 /* Return TARGET if it's a specified hardware register.
4595 If TARGET is a volatile mem ref, either return TARGET
4596 or return a reg copied *from* TARGET; ANSI requires this.
4598 Otherwise, if TEMP is not TARGET, return TEMP
4599 if it is constant (for efficiency),
4600 or if we really want the correct value. */
4601 if (!(target && GET_CODE (target) == REG
4602 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4603 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4604 && ! rtx_equal_p (temp, target)
4605 && (CONSTANT_P (temp) || (want_value & 1) != 0))
4606 dont_return_target = 1;
4609 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4610 the same as that of TARGET, adjust the constant. This is needed, for
4611 example, in case it is a CONST_DOUBLE and we want only a word-sized
4612 value. */
4613 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4614 && TREE_CODE (exp) != ERROR_MARK
4615 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4616 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4617 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4619 /* If value was not generated in the target, store it there.
4620 Convert the value to TARGET's type first if necessary.
4621 If TEMP and TARGET compare equal according to rtx_equal_p, but
4622 one or both of them are volatile memory refs, we have to distinguish
4623 two cases:
4624 - expand_expr has used TARGET. In this case, we must not generate
4625 another copy. This can be detected by TARGET being equal according
4626 to == .
4627 - expand_expr has not used TARGET - that means that the source just
4628 happens to have the same RTX form. Since temp will have been created
4629 by expand_expr, it will compare unequal according to == .
4630 We must generate a copy in this case, to reach the correct number
4631 of volatile memory references. */
4633 if ((! rtx_equal_p (temp, target)
4634 || (temp != target && (side_effects_p (temp)
4635 || side_effects_p (target))))
4636 && TREE_CODE (exp) != ERROR_MARK
4637 && ! dont_store_target
4638 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4639 but TARGET is not a valid memory reference, TEMP will differ
4640 from TARGET although it is really the same location. */
4641 && (TREE_CODE_CLASS (TREE_CODE (exp)) != 'd'
4642 || target != DECL_RTL_IF_SET (exp))
4643 /* If there's nothing to copy, don't bother. Don't call expr_size
4644 unless necessary, because the expr_size hook of some front ends (C++)
4645 aborts on objects that are not supposed to be bit-copied or
4646 bit-initialized. */
4647 && expr_size (exp) != const0_rtx)
4649 target = protect_from_queue (target, 1);
4650 if (GET_MODE (temp) != GET_MODE (target)
4651 && GET_MODE (temp) != VOIDmode)
4653 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4654 if (dont_return_target)
4656 /* In this case, we will return TEMP,
4657 so make sure it has the proper mode.
4658 But don't forget to store the value into TARGET. */
4659 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4660 emit_move_insn (target, temp);
4663 convert_move (target, temp, unsignedp);
4666 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4668 /* Handle copying a string constant into an array. The string
4669 constant may be shorter than the array. So copy just the string's
4670 actual length, and clear the rest. First get the size of the data
4671 type of the string, which is actually the size of the target. */
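/* A hedged worked example: for  char buf[8] = "abc";  expr_size is 8
   while TREE_STRING_LENGTH is 4 ("abc" plus the terminating NUL), so
   the code below copies 4 bytes and then clears the remaining 4.  */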
4672 rtx size = expr_size (exp);
4674 if (GET_CODE (size) == CONST_INT
4675 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4676 emit_block_move (target, temp, size,
4677 (want_value & 2
4678 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4681 /* Compute the size of the data to copy from the string. */
4682 tree copy_size
4683 = size_binop (MIN_EXPR,
4684 make_tree (sizetype, size),
4685 size_int (TREE_STRING_LENGTH (exp)));
4686 rtx copy_size_rtx
4687 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4688 (want_value & 2
4689 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4692 /* Copy that much. */
4693 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4694 TREE_UNSIGNED (sizetype));
4695 emit_block_move (target, temp, copy_size_rtx,
4696 (want_value & 2
4697 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4699 /* Figure out how much is left in TARGET that we have to clear.
4700 Do all calculations in ptr_mode. */
4701 if (GET_CODE (copy_size_rtx) == CONST_INT)
4703 size = plus_constant (size, -INTVAL (copy_size_rtx));
4704 target = adjust_address (target, BLKmode,
4705 INTVAL (copy_size_rtx));
4709 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4710 copy_size_rtx, NULL_RTX, 0,
4713 #ifdef POINTERS_EXTEND_UNSIGNED
4714 if (GET_MODE (copy_size_rtx) != Pmode)
4715 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4716 TREE_UNSIGNED (sizetype));
4717 #endif
4719 target = offset_address (target, copy_size_rtx,
4720 highest_pow2_factor (copy_size));
4721 label = gen_label_rtx ();
4722 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4723 GET_MODE (size), 0, label);
4726 if (size != const0_rtx)
4727 clear_storage (target, size);
4733 /* Handle calls that return values in multiple non-contiguous locations.
4734 The Irix 6 ABI has examples of this. */
4735 else if (GET_CODE (target) == PARALLEL)
4736 emit_group_load (target, temp, TREE_TYPE (exp),
4737 int_size_in_bytes (TREE_TYPE (exp)));
4738 else if (GET_MODE (temp) == BLKmode)
4739 emit_block_move (target, temp, expr_size (exp),
4740 (want_value & 2
4741 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4743 emit_move_insn (target, temp);
4746 /* If we don't want a value, return NULL_RTX. */
4747 if ((want_value & 1) == 0)
4750 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4751 ??? The latter test doesn't seem to make sense. */
4752 else if (dont_return_target && GET_CODE (temp) != MEM)
4755 /* Return TARGET itself if it is a hard register. */
4756 else if ((want_value & 1) != 0
4757 && GET_MODE (target) != BLKmode
4758 && ! (GET_CODE (target) == REG
4759 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4760 return copy_to_reg (target);
4766 /* Return 1 if EXP just contains zeros. */
4769 is_zeros_p (tree exp)
4773 switch (TREE_CODE (exp))
4777 case NON_LVALUE_EXPR:
4778 case VIEW_CONVERT_EXPR:
4779 return is_zeros_p (TREE_OPERAND (exp, 0));
4782 return integer_zerop (exp);
4786 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4789 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4792 for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4793 elt = TREE_CHAIN (elt))
4794 if (!is_zeros_p (TREE_VALUE (elt)))
4800 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4801 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4802 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4803 if (! is_zeros_p (TREE_VALUE (elt)))
4813 /* Return 1 if EXP contains mostly (3/4) zeros. */
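/* Illustrative example (not in the original source): for the C
   initializer  { 0, 0, 0, 5 }  the loop below counts elts == 4 and
   zeros == 3, and 4 * 3 >= 3 * 4 holds, so the constructor is treated
   as mostly zero and callers will clear the whole object first and
   store only the nonzero element.  */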
4816 mostly_zeros_p (tree exp)
4818 if (TREE_CODE (exp) == CONSTRUCTOR)
4820 int elts = 0, zeros = 0;
4821 tree elt = CONSTRUCTOR_ELTS (exp);
4822 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4824 /* If there are no ranges of true bits, it is all zero. */
4825 return elt == NULL_TREE;
4827 for (; elt; elt = TREE_CHAIN (elt))
4829 /* We do not handle the case where the index is a RANGE_EXPR,
4830 so the statistic will be somewhat inaccurate.
4831 We do make a more accurate count in store_constructor itself,
4832 and since this function is only used for nested array elements,
4833 this should be close enough. */
4834 if (mostly_zeros_p (TREE_VALUE (elt)))
4839 return 4 * zeros >= 3 * elts;
4842 return is_zeros_p (exp);
4845 /* Helper function for store_constructor.
4846 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4847 TYPE is the type of the CONSTRUCTOR, not the element type.
4848 CLEARED is as for store_constructor.
4849 ALIAS_SET is the alias set to use for any stores.
4851 This provides a recursive shortcut back to store_constructor when it isn't
4852 necessary to go through store_field. This is so that we can pass through
4853 the cleared field to let store_constructor know that we may not have to
4854 clear a substructure if the outer structure has already been cleared. */
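/* Illustrative scenario (an assumption, not from the source): when
   storing  struct { struct inner a; int b; }  from a mostly-zero
   constructor, store_constructor clears the whole object once and then
   recurses through here with CLEARED set, so the nested constructor
   for  a  does not emit a second clearing sequence.  */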
4857 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4858 HOST_WIDE_INT bitpos, enum machine_mode mode,
4859 tree exp, tree type, int cleared, int alias_set)
4861 if (TREE_CODE (exp) == CONSTRUCTOR
4862 && bitpos % BITS_PER_UNIT == 0
4863 /* If we have a nonzero bitpos for a register target, then we just
4864 let store_field do the bitfield handling. This is unlikely to
4865 generate unnecessary clear instructions anyway. */
4866 && (bitpos == 0 || GET_CODE (target) == MEM))
4868 if (GET_CODE (target) == MEM)
4870 = adjust_address (target,
4871 GET_MODE (target) == BLKmode
4873 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4874 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4877 /* Update the alias set, if required. */
4878 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4879 && MEM_ALIAS_SET (target) != 0)
4881 target = copy_rtx (target);
4882 set_mem_alias_set (target, alias_set);
4885 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4888 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4892 /* Store the value of constructor EXP into the rtx TARGET.
4893 TARGET is either a REG or a MEM; we know it cannot conflict, since
4894 safe_from_p has been called.
4895 CLEARED is true if TARGET is known to have been zeroed.
4896 SIZE is the number of bytes of TARGET we are allowed to modify: this
4897 may not be the same as the size of EXP if we are assigning to a field
4898 which has been packed to exclude padding bits. */
4901 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4903 tree type = TREE_TYPE (exp);
4904 #ifdef WORD_REGISTER_OPERATIONS
4905 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4906 #endif
4908 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4909 || TREE_CODE (type) == QUAL_UNION_TYPE)
4913 /* If size is zero or the target is already cleared, do nothing. */
4914 if (size == 0 || cleared)
4916 /* We either clear the aggregate or indicate the value is dead. */
4917 else if ((TREE_CODE (type) == UNION_TYPE
4918 || TREE_CODE (type) == QUAL_UNION_TYPE)
4919 && ! CONSTRUCTOR_ELTS (exp))
4920 /* If the constructor is empty, clear the union. */
4922 clear_storage (target, expr_size (exp));
4926 /* If we are building a static constructor into a register,
4927 set the initial value as zero so we can fold the value into
4928 a constant. But if more than one register is involved,
4929 this probably loses. */
4930 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4931 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4933 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4937 /* If the constructor has fewer fields than the structure
4938 or if we are initializing the structure to mostly zeros,
4939 clear the whole structure first. Don't do this if TARGET is a
4940 register whose mode size isn't equal to SIZE since clear_storage
4941 can't handle this case. */
4942 else if (((list_length (CONSTRUCTOR_ELTS (exp)) != fields_length (type))
4943 || mostly_zeros_p (exp))
4944 && (GET_CODE (target) != REG
4945 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4948 rtx xtarget = target;
4950 if (readonly_fields_p (type))
4952 xtarget = copy_rtx (xtarget);
4953 RTX_UNCHANGING_P (xtarget) = 1;
4956 clear_storage (xtarget, GEN_INT (size));
4961 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4963 /* Store each element of the constructor into
4964 the corresponding field of TARGET. */
4966 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4968 tree field = TREE_PURPOSE (elt);
4969 tree value = TREE_VALUE (elt);
4970 enum machine_mode mode;
4971 HOST_WIDE_INT bitsize;
4972 HOST_WIDE_INT bitpos = 0;
4974 rtx to_rtx = target;
4976 /* Just ignore missing fields.
4977 We cleared the whole structure, above,
4978 if any fields are missing. */
4982 if (cleared && is_zeros_p (value))
4985 if (host_integerp (DECL_SIZE (field), 1))
4986 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4990 mode = DECL_MODE (field);
4991 if (DECL_BIT_FIELD (field))
4994 offset = DECL_FIELD_OFFSET (field);
4995 if (host_integerp (offset, 0)
4996 && host_integerp (bit_position (field), 0))
4998 bitpos = int_bit_position (field);
5002 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
5008 if (CONTAINS_PLACEHOLDER_P (offset))
5009 offset = build (WITH_RECORD_EXPR, sizetype,
5010 offset, make_tree (TREE_TYPE (exp), target));
5012 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
5013 if (GET_CODE (to_rtx) != MEM)
5016 #ifdef POINTERS_EXTEND_UNSIGNED
5017 if (GET_MODE (offset_rtx) != Pmode)
5018 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
5019 #else
5020 if (GET_MODE (offset_rtx) != ptr_mode)
5021 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
5022 #endif
5024 to_rtx = offset_address (to_rtx, offset_rtx,
5025 highest_pow2_factor (offset));
5028 if (TREE_READONLY (field))
5030 if (GET_CODE (to_rtx) == MEM)
5031 to_rtx = copy_rtx (to_rtx);
5033 RTX_UNCHANGING_P (to_rtx) = 1;
5036 #ifdef WORD_REGISTER_OPERATIONS
5037 /* If this initializes a field that is smaller than a word, at the
5038 start of a word, try to widen it to a full word.
5039 This special case allows us to output C++ member function
5040 initializations in a form that the optimizers can understand. */
5041 if (GET_CODE (target) == REG
5042 && bitsize < BITS_PER_WORD
5043 && bitpos % BITS_PER_WORD == 0
5044 && GET_MODE_CLASS (mode) == MODE_INT
5045 && TREE_CODE (value) == INTEGER_CST
5047 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5049 tree type = TREE_TYPE (value);
5051 if (TYPE_PRECISION (type) < BITS_PER_WORD)
5053 type = (*lang_hooks.types.type_for_size)
5054 (BITS_PER_WORD, TREE_UNSIGNED (type));
5055 value = convert (type, value);
5058 if (BYTES_BIG_ENDIAN)
5060 = fold (build (LSHIFT_EXPR, type, value,
5061 build_int_2 (BITS_PER_WORD - bitsize, 0)));
5062 bitsize = BITS_PER_WORD;
5067 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5068 && DECL_NONADDRESSABLE_P (field))
5070 to_rtx = copy_rtx (to_rtx);
5071 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5074 store_constructor_field (to_rtx, bitsize, bitpos, mode,
5075 value, type, cleared,
5076 get_alias_set (TREE_TYPE (field)));
5079 else if (TREE_CODE (type) == ARRAY_TYPE
5080 || TREE_CODE (type) == VECTOR_TYPE)
5085 tree domain = TYPE_DOMAIN (type);
5086 tree elttype = TREE_TYPE (type);
5088 HOST_WIDE_INT minelt = 0;
5089 HOST_WIDE_INT maxelt = 0;
5091 /* Vectors are like arrays, but the domain is stored via an array
5092 type indirectly. */
5093 if (TREE_CODE (type) == VECTOR_TYPE)
5095 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
5096 the same field as TYPE_DOMAIN, we are not guaranteed that
5097 it always uses the same form. */
5098 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
5099 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
5102 const_bounds_p = (TYPE_MIN_VALUE (domain)
5103 && TYPE_MAX_VALUE (domain)
5104 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5105 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5107 /* If we have constant bounds for the range of the type, get them. */
5110 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5111 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5114 /* If the constructor has fewer elements than the array,
5115 clear the whole array first. Similarly if this is
5116 a static constructor of a non-BLKmode object. */
5117 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
5121 HOST_WIDE_INT count = 0, zero_count = 0;
5122 need_to_clear = ! const_bounds_p;
5124 /* This loop is a more accurate version of the loop in
5125 mostly_zeros_p (it handles RANGE_EXPR in an index).
5126 It is also needed to check for missing elements. */
5127 for (elt = CONSTRUCTOR_ELTS (exp);
5128 elt != NULL_TREE && ! need_to_clear;
5129 elt = TREE_CHAIN (elt))
5131 tree index = TREE_PURPOSE (elt);
5132 HOST_WIDE_INT this_node_count;
5134 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5136 tree lo_index = TREE_OPERAND (index, 0);
5137 tree hi_index = TREE_OPERAND (index, 1);
5139 if (! host_integerp (lo_index, 1)
5140 || ! host_integerp (hi_index, 1))
5146 this_node_count = (tree_low_cst (hi_index, 1)
5147 - tree_low_cst (lo_index, 1) + 1);
5150 this_node_count = 1;
5152 count += this_node_count;
5153 if (mostly_zeros_p (TREE_VALUE (elt)))
5154 zero_count += this_node_count;
5157 /* Clear the entire array first if there are any missing elements,
5158 or if the incidence of zero elements is >= 75%. */
5160 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
5164 if (need_to_clear && size > 0)
5169 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5171 clear_storage (target, GEN_INT (size));
5175 else if (REG_P (target))
5176 /* Inform later passes that the old value is dead. */
5177 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5179 /* Store each element of the constructor into
5180 the corresponding element of TARGET, determined
5181 by counting the elements. */
5182 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
5184 elt = TREE_CHAIN (elt), i++)
5186 enum machine_mode mode;
5187 HOST_WIDE_INT bitsize;
5188 HOST_WIDE_INT bitpos;
5190 tree value = TREE_VALUE (elt);
5191 tree index = TREE_PURPOSE (elt);
5192 rtx xtarget = target;
5194 if (cleared && is_zeros_p (value))
5197 unsignedp = TREE_UNSIGNED (elttype);
5198 mode = TYPE_MODE (elttype);
5199 if (mode == BLKmode)
5200 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5201 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5204 bitsize = GET_MODE_BITSIZE (mode);
5206 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5208 tree lo_index = TREE_OPERAND (index, 0);
5209 tree hi_index = TREE_OPERAND (index, 1);
5210 rtx index_r, pos_rtx, loop_end;
5211 struct nesting *loop;
5212 HOST_WIDE_INT lo, hi, count;
5215 /* If the range is constant and "small", unroll the loop. */
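/* Illustrative example (GNU C range syntax, an addition rather than
   original text): for  int a[4] = { [0 ... 3] = 7 };  we get lo == 0,
   hi == 3 and count == 4, so for a small in-memory target the code
   below emits four individual stores instead of a run-time loop.  */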
5217 && host_integerp (lo_index, 0)
5218 && host_integerp (hi_index, 0)
5219 && (lo = tree_low_cst (lo_index, 0),
5220 hi = tree_low_cst (hi_index, 0),
5221 count = hi - lo + 1,
5222 (GET_CODE (target) != MEM
5224 || (host_integerp (TYPE_SIZE (elttype), 1)
5225 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5228 lo -= minelt; hi -= minelt;
5229 for (; lo <= hi; lo++)
5231 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5233 if (GET_CODE (target) == MEM
5234 && !MEM_KEEP_ALIAS_SET_P (target)
5235 && TREE_CODE (type) == ARRAY_TYPE
5236 && TYPE_NONALIASED_COMPONENT (type))
5238 target = copy_rtx (target);
5239 MEM_KEEP_ALIAS_SET_P (target) = 1;
5242 store_constructor_field
5243 (target, bitsize, bitpos, mode, value, type, cleared,
5244 get_alias_set (elttype));
5249 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
5250 loop_end = gen_label_rtx ();
5252 unsignedp = TREE_UNSIGNED (domain);
5254 index = build_decl (VAR_DECL, NULL_TREE, domain);
5257 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5259 SET_DECL_RTL (index, index_r);
5260 if (TREE_CODE (value) == SAVE_EXPR
5261 && SAVE_EXPR_RTL (value) == 0)
5263 /* Make sure value gets expanded once before the
5264 loop. */
5265 expand_expr (value, const0_rtx, VOIDmode, 0);
5268 store_expr (lo_index, index_r, 0);
5269 loop = expand_start_loop (0);
5271 /* Assign value to element index. */
5273 = convert (ssizetype,
5274 fold (build (MINUS_EXPR, TREE_TYPE (index),
5275 index, TYPE_MIN_VALUE (domain))));
5276 position = size_binop (MULT_EXPR, position,
5278 TYPE_SIZE_UNIT (elttype)));
5280 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
5281 xtarget = offset_address (target, pos_rtx,
5282 highest_pow2_factor (position));
5283 xtarget = adjust_address (xtarget, mode, 0);
5284 if (TREE_CODE (value) == CONSTRUCTOR)
5285 store_constructor (value, xtarget, cleared,
5286 bitsize / BITS_PER_UNIT);
5288 store_expr (value, xtarget, 0);
5290 expand_exit_loop_if_false (loop,
5291 build (LT_EXPR, integer_type_node,
5294 expand_increment (build (PREINCREMENT_EXPR,
5296 index, integer_one_node), 0, 0);
5298 emit_label (loop_end);
5301 else if ((index != 0 && ! host_integerp (index, 0))
5302 || ! host_integerp (TYPE_SIZE (elttype), 1))
5307 index = ssize_int (1);
5310 index = convert (ssizetype,
5311 fold (build (MINUS_EXPR, index,
5312 TYPE_MIN_VALUE (domain))));
5314 position = size_binop (MULT_EXPR, index,
5316 TYPE_SIZE_UNIT (elttype)));
5317 xtarget = offset_address (target,
5318 expand_expr (position, 0, VOIDmode, 0),
5319 highest_pow2_factor (position));
5320 xtarget = adjust_address (xtarget, mode, 0);
5321 store_expr (value, xtarget, 0);
5326 bitpos = ((tree_low_cst (index, 0) - minelt)
5327 * tree_low_cst (TYPE_SIZE (elttype), 1));
5329 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5331 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
5332 && TREE_CODE (type) == ARRAY_TYPE
5333 && TYPE_NONALIASED_COMPONENT (type))
5335 target = copy_rtx (target);
5336 MEM_KEEP_ALIAS_SET_P (target) = 1;
5339 store_constructor_field (target, bitsize, bitpos, mode, value,
5340 type, cleared, get_alias_set (elttype));
5346 /* Set constructor assignments. */
5347 else if (TREE_CODE (type) == SET_TYPE)
5349 tree elt = CONSTRUCTOR_ELTS (exp);
5350 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
5351 tree domain = TYPE_DOMAIN (type);
5352 tree domain_min, domain_max, bitlength;
5354 /* The default implementation strategy is to extract the constant
5355 parts of the constructor, use that to initialize the target,
5356 and then "or" in whatever non-constant ranges we need in addition.
5358 If a large set is all zero or all ones, it is
5359 probably better to set it using memset (if available) or bzero.
5360 Also, if a large set has just a single range, it may also be
5361 better to first clear the set (using
5362 bzero/memset), and then set the bits we want. */
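/* A hedged worked example: for a 32-bit set whose constructor is the
   single constant range [3 .. 5], the word-at-a-time path below sets
   bits 3, 4 and 5, producing the word 0x38 on a little-endian target,
   and stores it with a single emit_move_insn.  */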
5364 /* Check for all zeros. */
5365 if (elt == NULL_TREE && size > 0)
5368 clear_storage (target, GEN_INT (size));
5372 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
5373 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
5374 bitlength = size_binop (PLUS_EXPR,
5375 size_diffop (domain_max, domain_min),
5378 nbits = tree_low_cst (bitlength, 1);
5380 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
5381 are "complicated" (more than one range), initialize (the
5382 constant parts) by copying from a constant. */
5383 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
5384 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
5386 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
5387 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
5388 char *bit_buffer = (char *) alloca (nbits);
5389 HOST_WIDE_INT word = 0;
5390 unsigned int bit_pos = 0;
5391 unsigned int ibit = 0;
5392 unsigned int offset = 0; /* In bytes from beginning of set. */
5394 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
5397 if (bit_buffer[ibit])
5399 if (BYTES_BIG_ENDIAN)
5400 word |= (1 << (set_word_size - 1 - bit_pos));
5402 word |= 1 << bit_pos;
5406 if (bit_pos >= set_word_size || ibit == nbits)
5408 if (word != 0 || ! cleared)
5410 rtx datum = GEN_INT (word);
5413 /* The assumption here is that it is safe to use
5414 XEXP if the set is multi-word, but not if
5415 it's single-word. */
5416 if (GET_CODE (target) == MEM)
5417 to_rtx = adjust_address (target, mode, offset);
5418 else if (offset == 0)
5422 emit_move_insn (to_rtx, datum);
5429 offset += set_word_size / BITS_PER_UNIT;
5434 /* Don't bother clearing storage if the set is all ones. */
5435 if (TREE_CHAIN (elt) != NULL_TREE
5436 || (TREE_PURPOSE (elt) == NULL_TREE
5437 ? nbits != 1
5438 : ( ! host_integerp (TREE_VALUE (elt), 0)
5439 || ! host_integerp (TREE_PURPOSE (elt), 0)
5440 || (tree_low_cst (TREE_VALUE (elt), 0)
5441 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5442 != (HOST_WIDE_INT) nbits))))
5443 clear_storage (target, expr_size (exp));
5445 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5447 /* Start of range of element or NULL. */
5448 tree startbit = TREE_PURPOSE (elt);
5449 /* End of range of element, or element value. */
5450 tree endbit = TREE_VALUE (elt);
5451 HOST_WIDE_INT startb, endb;
5452 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5454 bitlength_rtx = expand_expr (bitlength,
5455 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5457 /* Handle non-range tuple element like [ expr ]. */
5458 if (startbit == NULL_TREE)
5460 startbit = save_expr (endbit);
5464 startbit = convert (sizetype, startbit);
5465 endbit = convert (sizetype, endbit);
5466 if (! integer_zerop (domain_min))
5468 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5469 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5471 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5472 EXPAND_CONST_ADDRESS);
5473 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5474 EXPAND_CONST_ADDRESS);
5480 ((build_qualified_type ((*lang_hooks.types.type_for_mode)
5481 (GET_MODE (target), 0),
5484 emit_move_insn (targetx, target);
5487 else if (GET_CODE (target) == MEM)
5492 /* Optimization: If startbit and endbit are constants divisible
5493 by BITS_PER_UNIT, call memset instead. */
5494 if (TARGET_MEM_FUNCTIONS
5495 && TREE_CODE (startbit) == INTEGER_CST
5496 && TREE_CODE (endbit) == INTEGER_CST
5497 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5498 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5500 emit_library_call (memset_libfunc, LCT_NORMAL,
5502 plus_constant (XEXP (targetx, 0),
5503 startb / BITS_PER_UNIT),
5505 constm1_rtx, TYPE_MODE (integer_type_node),
5506 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5507 TYPE_MODE (sizetype));
5510 emit_library_call (setbits_libfunc, LCT_NORMAL,
5511 VOIDmode, 4, XEXP (targetx, 0),
5512 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5513 startbit_rtx, TYPE_MODE (sizetype),
5514 endbit_rtx, TYPE_MODE (sizetype));
5517 emit_move_insn (target, targetx);
5525 /* Store the value of EXP (an expression tree)
5526 into a subfield of TARGET which has mode MODE and occupies
5527 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5528 If MODE is VOIDmode, it means that we are storing into a bit-field.
5530 If VALUE_MODE is VOIDmode, return nothing in particular.
5531 UNSIGNEDP is not used in this case.
5533 Otherwise, return an rtx for the value stored. This rtx
5534 has mode VALUE_MODE if that is convenient to do.
5535 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5537 TYPE is the type of the underlying object.
5539 ALIAS_SET is the alias set for the destination. This value will
5540 (in general) be different from that for TARGET, since TARGET is a
5541 reference to the containing structure. */
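/* A hedged example (the field layout is an assumption): an assignment
   to a 3-bit bitfield at bit offset 5,  x.f = v,  reaches this function
   with BITSIZE == 3, BITPOS == 5 and MODE == VOIDmode, and the VOIDmode
   MODE selects the bit-field store path below.  */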
5544 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5545 enum machine_mode mode, tree exp, enum machine_mode value_mode,
5546 int unsignedp, tree type, int alias_set)
5548 HOST_WIDE_INT width_mask = 0;
5550 if (TREE_CODE (exp) == ERROR_MARK)
5551 return const0_rtx;
5553 /* If we have nothing to store, do nothing unless the expression has
5554 side-effects. */
5555 if (bitsize == 0)
5556 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5557 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5558 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5560 /* If we are storing into an unaligned field of an aligned union that is
5561 in a register, we may have the mode of TARGET being an integer mode but
5562 MODE == BLKmode. In that case, get an aligned object whose size and
5563 alignment are the same as TARGET and store TARGET into it (we can avoid
5564 the store if the field being stored is the entire width of TARGET). Then
5565 call ourselves recursively to store the field into a BLKmode version of
5566 that object. Finally, load from the object into TARGET. This is not
5567 very efficient in general, but should only be slightly more expensive
5568 than the otherwise-required unaligned accesses. Perhaps this can be
5569 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5570 twice, once with emit_move_insn and once via store_field. */
5573 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5575 rtx object = assign_temp (type, 0, 1, 1);
5576 rtx blk_object = adjust_address (object, BLKmode, 0);
5578 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5579 emit_move_insn (object, target);
5581 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5584 emit_move_insn (target, object);
5586 /* We want to return the BLKmode version of the data. */
5590 if (GET_CODE (target) == CONCAT)
5592 /* We're storing into a struct containing a single __complex. */
5596 return store_expr (exp, target, 0);
5599 /* If the structure is in a register or if the component
5600 is a bit field, we cannot use addressing to access it.
5601 Use bit-field techniques or SUBREG to store in it. */
5603 if (mode == VOIDmode
5604 || (mode != BLKmode && ! direct_store[(int) mode]
5605 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5606 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5607 || GET_CODE (target) == REG
5608 || GET_CODE (target) == SUBREG
5609 /* If the field isn't aligned enough to store as an ordinary memref,
5610 store it as a bit field. */
5612 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5613 || bitpos % GET_MODE_ALIGNMENT (mode))
5614 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5615 || (bitpos % BITS_PER_UNIT != 0)))
5616 /* If the RHS and field are a constant size and the size of the
5617 RHS isn't the same size as the bitfield, we must use bitfield
5618 operations. */
5619 || (bitsize >= 0
5620 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5621 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5623 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5625 /* If BITSIZE is narrower than the size of the type of EXP
5626 we will be narrowing TEMP. Normally, what's wanted are the
5627 low-order bits. However, if EXP's type is a record and this is
5628 a big-endian machine, we want the upper BITSIZE bits. */
5629 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5630 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5631 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5632 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5633 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5637 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5638 MODE. */
5639 if (mode != VOIDmode && mode != BLKmode
5640 && mode != TYPE_MODE (TREE_TYPE (exp)))
5641 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5643 /* If the modes of TARGET and TEMP are both BLKmode, both
5644 must be in memory and BITPOS must be aligned on a byte
5645 boundary. If so, we simply do a block copy. */
5646 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5648 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5649 || bitpos % BITS_PER_UNIT != 0)
5652 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5653 emit_block_move (target, temp,
5654 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5658 return value_mode == VOIDmode ? const0_rtx : target;
5661 /* Store the value in the bitfield. */
5662 store_bit_field (target, bitsize, bitpos, mode, temp,
5663 int_size_in_bytes (type));
5665 if (value_mode != VOIDmode)
5667 /* The caller wants an rtx for the value.
5668 If possible, avoid refetching from the bitfield itself. */
5669 if (width_mask != 0
5670 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5673 enum machine_mode tmode;
5675 tmode = GET_MODE (temp);
5676 if (tmode == VOIDmode)
5680 return expand_and (tmode, temp,
5681 gen_int_mode (width_mask, tmode),
5684 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5685 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5686 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5689 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5690 NULL_RTX, value_mode, VOIDmode,
5691 int_size_in_bytes (type));
5697 rtx addr = XEXP (target, 0);
5698 rtx to_rtx = target;
5700 /* If a value is wanted, it must be the lhs;
5701 so make the address stable for multiple use. */
5703 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5704 && ! CONSTANT_ADDRESS_P (addr)
5705 /* A frame-pointer reference is already stable. */
5706 && ! (GET_CODE (addr) == PLUS
5707 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5708 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5709 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5710 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5712 /* Now build a reference to just the desired component. */
5714 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5716 if (to_rtx == target)
5717 to_rtx = copy_rtx (to_rtx);
5719 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5720 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5721 set_mem_alias_set (to_rtx, alias_set);
5723 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5727 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5728 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5729 codes and find the ultimate containing object, which we return.
5731 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5732 bit position, and *PUNSIGNEDP to the signedness of the field.
5733 If the position of the field is variable, we store a tree
5734 giving the variable offset (in units) in *POFFSET.
5735 This offset is in addition to the bit position.
5736 If the position is not variable, we store 0 in *POFFSET.
5738 If any of the extraction expressions is volatile,
5739 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5741 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5742 is a mode that can be used to access the field. In that case, *PBITSIZE
5743 is redundant.
5745 If the field describes a variable-sized object, *PMODE is set to
5746 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5747 this case, but the address of the object can be found. */
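/* Illustrative example (an assumption, not from the source): for the C
   lvalue  s.a[i].b  the loop below walks the COMPONENT_REF and
   ARRAY_REF nodes, accumulating the variable part
   offsetof (a) + i * sizeof (a[0])  into *POFFSET and the constant bit
   offset of  b  into *PBITPOS, and returns the ultimate containing
   object  s.  */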
5750 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5751 HOST_WIDE_INT *pbitpos, tree *poffset,
5752 enum machine_mode *pmode, int *punsignedp,
5756 enum machine_mode mode = VOIDmode;
5757 tree offset = size_zero_node;
5758 tree bit_offset = bitsize_zero_node;
5759 tree placeholder_ptr = 0;
5762 /* First get the mode, signedness, and size. We do this from just the
5763 outermost expression. */
5764 if (TREE_CODE (exp) == COMPONENT_REF)
5766 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5767 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5768 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5770 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5772 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5774 size_tree = TREE_OPERAND (exp, 1);
5775 *punsignedp = TREE_UNSIGNED (exp);
5779 mode = TYPE_MODE (TREE_TYPE (exp));
5780 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5782 if (mode == BLKmode)
5783 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5785 *pbitsize = GET_MODE_BITSIZE (mode);
5790 if (! host_integerp (size_tree, 1))
5791 mode = BLKmode, *pbitsize = -1;
5793 *pbitsize = tree_low_cst (size_tree, 1);
5796 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5797 and find the ultimate containing object. */
5800 if (TREE_CODE (exp) == BIT_FIELD_REF)
5801 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5802 else if (TREE_CODE (exp) == COMPONENT_REF)
5804 tree field = TREE_OPERAND (exp, 1);
5805 tree this_offset = DECL_FIELD_OFFSET (field);
5807 /* If this field hasn't been filled in yet, don't go
5808 past it. This should only happen when folding expressions
5809 made during type construction. */
5810 if (this_offset == 0)
5812 else if (CONTAINS_PLACEHOLDER_P (this_offset))
5813 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5815 offset = size_binop (PLUS_EXPR, offset, this_offset);
5816 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5817 DECL_FIELD_BIT_OFFSET (field));
5819 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5822 else if (TREE_CODE (exp) == ARRAY_REF
5823 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5825 tree index = TREE_OPERAND (exp, 1);
5826 tree array = TREE_OPERAND (exp, 0);
5827 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5828 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5829 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5831 /* We assume all arrays have sizes that are a multiple of a byte.
5832 First subtract the lower bound, if any, in the type of the
5833 index, then convert to sizetype and multiply by the size of the
5834 element. */
5835 if (low_bound != 0 && ! integer_zerop (low_bound))
5836 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5839 /* If the index has a self-referential type, pass it to a
5840 WITH_RECORD_EXPR; if the component size is self-referential, pass our
5841 component to one. */
5842 if (CONTAINS_PLACEHOLDER_P (index))
5843 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5844 if (CONTAINS_PLACEHOLDER_P (unit_size))
5845 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5847 offset = size_binop (PLUS_EXPR, offset,
5848 size_binop (MULT_EXPR,
5849 convert (sizetype, index),
5853 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5855 tree new = find_placeholder (exp, &placeholder_ptr);
5857 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5858 We might have been called from tree optimization where we
5859 haven't set up an object yet. */
5868 /* We can go inside most conversions: all NON_LVALUE_EXPRs, all normal
5869 conversions that don't change the mode, and all view conversions
5870 except those that need to "step up" the alignment. */
5871 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5872 && ! (TREE_CODE (exp) == VIEW_CONVERT_EXPR
5873 && ! ((TYPE_ALIGN (TREE_TYPE (exp))
5874 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5876 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5877 < BIGGEST_ALIGNMENT)
5878 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5879 || TYPE_ALIGN_OK (TREE_TYPE
5880 (TREE_OPERAND (exp, 0))))))
5881 && ! ((TREE_CODE (exp) == NOP_EXPR
5882 || TREE_CODE (exp) == CONVERT_EXPR)
5883 && (TYPE_MODE (TREE_TYPE (exp))
5884 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5887 /* If any reference in the chain is volatile, the effect is volatile. */
5888 if (TREE_THIS_VOLATILE (exp))
5891 exp = TREE_OPERAND (exp, 0);
5894 /* If OFFSET is constant, see if we can return the whole thing as a
5895 constant bit position. Otherwise, split it up. */
5896 if (host_integerp (offset, 0)
5897 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5899 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5900 && host_integerp (tem, 0))
5901 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5903 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5909 /* Return 1 if T is an expression that get_inner_reference handles. */
5912 handled_component_p (tree t)
5914 switch (TREE_CODE (t))
5919 case ARRAY_RANGE_REF:
5920 case NON_LVALUE_EXPR:
5921 case VIEW_CONVERT_EXPR:
5924 /* ??? Sure they are handled, but get_inner_reference may return
5925 a different PBITSIZE, depending upon whether the expression is
5926 wrapped up in a NOP_EXPR or not, e.g. for bitfields. */
5929 return (TYPE_MODE (TREE_TYPE (t))
5930 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5937 /* Given an rtx VALUE that may contain additions and multiplications, return
5938 an equivalent value that just refers to a register, memory, or constant.
5939 This is done by generating instructions to perform the arithmetic and
5940 returning a pseudo-register containing the value.
5942 The returned value may be a REG, SUBREG, MEM or constant. */
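/* A hedged usage sketch (REG here stands for some existing pseudo; this
   call site is an illustrative addition, not from the source):

     rtx sum = force_operand (gen_rtx_PLUS (SImode, reg, GEN_INT (4)),
                              NULL_RTX);

   This emits the addition and returns an rtx holding  reg + 4  that is
   valid wherever a simple operand is required.  */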
5945 force_operand (rtx value, rtx target)
5948 /* Use subtarget as the target for operand 0 of a binary operation. */
5949 rtx subtarget = get_subtarget (target);
5950 enum rtx_code code = GET_CODE (value);
5952 /* Check for a PIC address load. */
5953 if ((code == PLUS || code == MINUS)
5954 && XEXP (value, 0) == pic_offset_table_rtx
5955 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5956 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5957 || GET_CODE (XEXP (value, 1)) == CONST))
5960 subtarget = gen_reg_rtx (GET_MODE (value));
5961 emit_move_insn (subtarget, value);
5965 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5968 target = gen_reg_rtx (GET_MODE (value));
5969 convert_move (target, force_operand (XEXP (value, 0), NULL),
5970 code == ZERO_EXTEND);
5974 if (GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
5976 op2 = XEXP (value, 1);
5977 if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
5979 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5982 op2 = negate_rtx (GET_MODE (value), op2);
5985 /* Check for an addition with OP2 a constant integer and our first
5986 operand a PLUS of a virtual register and something else. In that
5987 case, we want to emit the sum of the virtual register and the
5988 constant first and then add the other value. This allows virtual
5989 register instantiation to simply modify the constant rather than
5990 creating another one around this addition. */
5991 if (code == PLUS && GET_CODE (op2) == CONST_INT
5992 && GET_CODE (XEXP (value, 0)) == PLUS
5993 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5994 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5995 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5997 rtx temp = expand_simple_binop (GET_MODE (value), code,
5998 XEXP (XEXP (value, 0), 0), op2,
5999 subtarget, 0, OPTAB_LIB_WIDEN);
6000 return expand_simple_binop (GET_MODE (value), code, temp,
6001 force_operand (XEXP (XEXP (value,
6003 target, 0, OPTAB_LIB_WIDEN);
6006 op1 = force_operand (XEXP (value, 0), subtarget);
6007 op2 = force_operand (op2, NULL_RTX);
6011 return expand_mult (GET_MODE (value), op1, op2, target, 1);
6013 if (!INTEGRAL_MODE_P (GET_MODE (value)))
6014 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6015 target, 1, OPTAB_LIB_WIDEN);
6017 return expand_divmod (0,
6018 FLOAT_MODE_P (GET_MODE (value))
6019 ? RDIV_EXPR : TRUNC_DIV_EXPR,
6020 GET_MODE (value), op1, op2, target, 0);
6023 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6027 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
6031 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6035 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6036 target, 0, OPTAB_LIB_WIDEN);
6039 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6040 target, 1, OPTAB_LIB_WIDEN);
6043 if (GET_RTX_CLASS (code) == '1')
6045 op1 = force_operand (XEXP (value, 0), NULL_RTX);
6046 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
6049 #ifdef INSN_SCHEDULING
6050 /* On machines that have insn scheduling, we want all memory references to be
6051 explicit, so we need to deal with such paradoxical SUBREGs. */
6052 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
6053 && (GET_MODE_SIZE (GET_MODE (value))
6054 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
6056 = simplify_gen_subreg (GET_MODE (value),
6057 force_reg (GET_MODE (SUBREG_REG (value)),
6058 force_operand (SUBREG_REG (value),
6060 GET_MODE (SUBREG_REG (value)),
6061 SUBREG_BYTE (value));
6062 #endif
6067 /* Subroutine of expand_expr: return nonzero iff there is no way that
6068 EXP can reference X, which is being modified. TOP_P is nonzero if this
6069 call is going to be used to determine whether we need a temporary
6070 for EXP, as opposed to a recursive call to this function.
6072 It is always safe for this routine to return zero since it merely
6073 searches for optimization opportunities. */
6076 safe_from_p (rtx x, tree exp, int top_p)
6080 static tree save_expr_list;
6083 /* If EXP has varying size, we MUST use a target since we currently
6084 have no way of allocating temporaries of variable size
6085 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6086 So we assume here that something at a higher level has prevented a
6087 clash. This is somewhat bogus, but the best we can do. Only
6088 do this when X is BLKmode and when we are at the top level. */
6089 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6090 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
6091 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
6092 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
6093 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
6095 && GET_MODE (x) == BLKmode)
6096 /* If X is in the outgoing argument area, it is always safe. */
6097 || (GET_CODE (x) == MEM
6098 && (XEXP (x, 0) == virtual_outgoing_args_rtx
6099 || (GET_CODE (XEXP (x, 0)) == PLUS
6100 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
6103 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
6104 find the underlying pseudo. */
6105 if (GET_CODE (x) == SUBREG)
6108 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6112 /* A SAVE_EXPR might appear many times in the expression passed to the
6113 top-level safe_from_p call, and if it has a complex subexpression,
6114 examining it multiple times could result in a combinatorial explosion.
6115 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
6116 with optimization took about 28 minutes to compile -- even though it was
6117 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
6118 and turn that off when we are done. We keep a list of the SAVE_EXPRs
6119 we have processed. Note that the only test of top_p was above. */
6128 rtn = safe_from_p (x, exp, 0);
6130 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
6131 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
6136 /* Now look at our tree code and possibly recurse. */
6137 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6140 exp_rtl = DECL_RTL_IF_SET (exp);
6147 if (TREE_CODE (exp) == TREE_LIST)
6151 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
6153 exp = TREE_CHAIN (exp);
6156 if (TREE_CODE (exp) != TREE_LIST)
6157 return safe_from_p (x, exp, 0);
6160 else if (TREE_CODE (exp) == ERROR_MARK)
6161 return 1; /* An already-visited SAVE_EXPR? */
6167 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6172 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6176 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6177 the expression. If it is set, we conflict iff we are that rtx or
6178 both are in memory. Otherwise, we check all operands of the
6179 expression recursively. */
6181 switch (TREE_CODE (exp))
6184 /* If the operand is static or we are static, we can't conflict.
6185 Likewise if we don't conflict with the operand at all. */
6186 if (staticp (TREE_OPERAND (exp, 0))
6187 || TREE_STATIC (exp)
6188 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6191 /* Otherwise, the only way this can conflict is if we are taking
6192 the address of a DECL whose address is part of X, which is
6193 very rare. */
6194 exp = TREE_OPERAND (exp, 0);
6197 if (!DECL_RTL_SET_P (exp)
6198 || GET_CODE (DECL_RTL (exp)) != MEM)
6201 exp_rtl = XEXP (DECL_RTL (exp), 0);
6206 if (GET_CODE (x) == MEM
6207 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6208 get_alias_set (exp)))
6213 /* Assume that the call will clobber all hard registers and
6214 all of memory. */
6215 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6216 || GET_CODE (x) == MEM)
6221 /* If a sequence exists, we would have to scan every instruction
6222 in the sequence to see if it was safe. This is probably not
6223 worthwhile. */
6224 if (RTL_EXPR_SEQUENCE (exp))
6227 exp_rtl = RTL_EXPR_RTL (exp);
6230 case WITH_CLEANUP_EXPR:
6231 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
6234 case CLEANUP_POINT_EXPR:
6235 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6238 exp_rtl = SAVE_EXPR_RTL (exp);
6242 /* If we've already scanned this, don't do it again. Otherwise,
6243 show we've scanned it and record for clearing the flag if we're
6244 going on. */
6245 if (TREE_PRIVATE (exp))
6248 TREE_PRIVATE (exp) = 1;
6249 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6251 TREE_PRIVATE (exp) = 0;
6255 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
6259 /* The only operand we look at is operand 1. The rest aren't
6260 part of the expression. */
6261 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
6263 case METHOD_CALL_EXPR:
6264 /* This takes an rtx argument, but shouldn't appear here. */
6271 /* If we have an rtx, we do not need to scan our operands. */
6275 nops = first_rtl_op (TREE_CODE (exp));
6276 for (i = 0; i < nops; i++)
6277 if (TREE_OPERAND (exp, i) != 0
6278 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6281 /* If this is a language-specific tree code, it may require
6282 special handling. */
6283 if ((unsigned int) TREE_CODE (exp)
6284 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
6285 && !(*lang_hooks.safe_from_p) (x, exp))
6289 /* If we have an rtl, find any enclosed object. Then see if we conflict with it. */
6293 if (GET_CODE (exp_rtl) == SUBREG)
6295 exp_rtl = SUBREG_REG (exp_rtl);
6296 if (GET_CODE (exp_rtl) == REG
6297 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6301 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6302 are memory and they conflict. */
6303 return ! (rtx_equal_p (x, exp_rtl)
6304 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
6305 && true_dependence (exp_rtl, VOIDmode, x,
6306 rtx_addr_varies_p)));
6309 /* If we reach here, it is safe. */
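/* A minimal sketch (an addition, not part of the original source) of the
   typical caller pattern for safe_from_p in this file: discard a suggested
   target that evaluating EXP might clobber and fall back to a fresh
   pseudo.  */

static rtx
safe_target_example (rtx target, tree exp, enum machine_mode mode)
{
  if (target == 0 || ! safe_from_p (target, exp, 1))
    target = gen_reg_rtx (mode);
  return target;
}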
6313 /* Subroutine of expand_expr: return rtx if EXP is a
6314 variable or parameter; else return 0. */
6320 switch (TREE_CODE (exp))
6324 return DECL_RTL (exp);
6330 #ifdef MAX_INTEGER_COMPUTATION_MODE
6333 check_max_integer_computation_mode (tree exp)
6335 enum tree_code code;
6336 enum machine_mode mode;
6338 /* Strip any NOPs that don't change the mode. */
6339 STRIP_NOPS (exp);
6340 code = TREE_CODE (exp);
6342 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
6343 if (code == NOP_EXPR
6344 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
6347 /* First check the type of the overall operation. We need only look at
6348 unary, binary and relational operations. */
6349 if (TREE_CODE_CLASS (code) == '1'
6350 || TREE_CODE_CLASS (code) == '2'
6351 || TREE_CODE_CLASS (code) == '<')
6353 mode = TYPE_MODE (TREE_TYPE (exp));
6354 if (GET_MODE_CLASS (mode) == MODE_INT
6355 && mode > MAX_INTEGER_COMPUTATION_MODE)
6356 internal_error ("unsupported wide integer operation");
6359 /* Check operand of a unary op. */
6360 if (TREE_CODE_CLASS (code) == '1')
6362 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6363 if (GET_MODE_CLASS (mode) == MODE_INT
6364 && mode > MAX_INTEGER_COMPUTATION_MODE)
6365 internal_error ("unsupported wide integer operation");
6368 /* Check operands of a binary/comparison op. */
6369 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
6371 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6372 if (GET_MODE_CLASS (mode) == MODE_INT
6373 && mode > MAX_INTEGER_COMPUTATION_MODE)
6374 internal_error ("unsupported wide integer operation");
6376 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
6377 if (GET_MODE_CLASS (mode) == MODE_INT
6378 && mode > MAX_INTEGER_COMPUTATION_MODE)
6379 internal_error ("unsupported wide integer operation");
6384 /* Return the highest power of two that EXP is known to be a multiple of.
6385 This is used in updating alignment of MEMs in array references. */
6387 static unsigned HOST_WIDE_INT
6388 highest_pow2_factor (tree exp)
6390 unsigned HOST_WIDE_INT c0, c1;
6392 switch (TREE_CODE (exp))
6395 /* We can find the lowest bit that's a one. If the low
6396 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6397 We need to handle this case since we can find it in a COND_EXPR,
6398 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6399 erroneous program, so return BIGGEST_ALIGNMENT to avoid any later ICE. */
6401 if (TREE_CONSTANT_OVERFLOW (exp))
6402 return BIGGEST_ALIGNMENT;
6405 /* Note: tree_low_cst is intentionally not used here, since
6406 we don't care about the upper bits. */
6407 c0 = TREE_INT_CST_LOW (exp);
6408 c0 &= -c0;
6409 return c0 ? c0 : BIGGEST_ALIGNMENT;
6413 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6414 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6415 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6416 return MIN (c0, c1);
6418 case MULT_EXPR:
6419 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6420 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6421 return c0 * c1;
6423 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6425 if (integer_pow2p (TREE_OPERAND (exp, 1))
6426 && host_integerp (TREE_OPERAND (exp, 1), 1))
6428 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6429 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6430 return MAX (1, c0 / c1);
6434 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6435 case SAVE_EXPR: case WITH_RECORD_EXPR:
6436 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6439 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6442 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6443 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6444 return MIN (c0, c1);
6453 /* Similar, except that it is known that the expression must be a multiple
6454 of the alignment of TYPE. */
6456 static unsigned HOST_WIDE_INT
6457 highest_pow2_factor_for_type (tree type, tree exp)
6459 unsigned HOST_WIDE_INT type_align, factor;
6461 factor = highest_pow2_factor (exp);
6462 type_align = TYPE_ALIGN (type) / BITS_PER_UNIT;
6463 return MAX (factor, type_align);
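/* Illustrative helper (an addition, not used by the compiler): the
   INTEGER_CST case of highest_pow2_factor comes down to isolating the
   lowest set bit, since C & -C is the largest power of two dividing C.  */

static unsigned HOST_WIDE_INT
pow2_factor_example (unsigned HOST_WIDE_INT c)
{
  /* E.g. 24 -> 8, 20 -> 4, 7 -> 1.  A zero low part means the factor
     exceeds HOST_BITS_PER_WIDE_INT, whence BIGGEST_ALIGNMENT above.  */
  return c ? (c & -c) : BIGGEST_ALIGNMENT;
}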
6466 /* Return an object on the placeholder list that matches EXP, a
6467 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
6468 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
6469 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
6470 is a location which initially points to a starting location in the
6471 placeholder list (zero means start of the list) and where a pointer into
6472 the placeholder list at which the object is found is placed. */
6475 find_placeholder (tree exp, tree *plist)
6477 tree type = TREE_TYPE (exp);
6478 tree placeholder_expr;
6480 for (placeholder_expr
6481 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
6482 placeholder_expr != 0;
6483 placeholder_expr = TREE_CHAIN (placeholder_expr))
6485 tree need_type = TYPE_MAIN_VARIANT (type);
6488 /* Find the outermost reference that is of the type we want. If none,
6489 see if any object has a type that is a pointer to the type we want. */
6491 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6492 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6493 || TREE_CODE (elt) == COND_EXPR)
6494 ? TREE_OPERAND (elt, 1)
6495 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6496 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6497 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6498 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6499 ? TREE_OPERAND (elt, 0) : 0))
6500 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6503 *plist = placeholder_expr;
6507 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6509 = ((TREE_CODE (elt) == COMPOUND_EXPR
6510 || TREE_CODE (elt) == COND_EXPR)
6511 ? TREE_OPERAND (elt, 1)
6512 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6513 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6514 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6515 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6516 ? TREE_OPERAND (elt, 0) : 0))
6517 if (POINTER_TYPE_P (TREE_TYPE (elt))
6518 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6522 *plist = placeholder_expr;
6523 return build1 (INDIRECT_REF, need_type, elt);
6530 /* expand_expr: generate code for computing expression EXP.
6531 An rtx for the computed value is returned. The value is never null.
6532 In the case of a void EXP, const0_rtx is returned.
6534 The value may be stored in TARGET if TARGET is nonzero.
6535 TARGET is just a suggestion; callers must assume that
6536 the rtx returned may not be the same as TARGET.
6538 If TARGET is CONST0_RTX, it means that the value will be ignored.
6540 If TMODE is not VOIDmode, it suggests generating the
6541 result in mode TMODE. But this is done only when convenient.
6542 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6543 TMODE is just a suggestion; callers must assume that
6544 the rtx returned may not have mode TMODE.
6546 Note that TARGET may have neither TMODE nor MODE. In that case, it
6547 probably will not be used.
6549 If MODIFIER is EXPAND_SUM then when EXP is an addition
6550 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6551 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6552 products as above, or REG or MEM, or constant.
6553 Ordinarily in such cases we would output mul or add instructions
6554 and then return a pseudo reg containing the sum.
6556 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6557 it also marks a label as absolutely required (it can't be dead).
6558 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6559 This is used for outputting expressions used in initializers.
6561 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6562 with a constant address even if that address is not normally legitimate.
6563 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6565 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6566 a call parameter. Such targets require special care as we haven't yet
6567 marked TARGET so that it's safe from being trashed by libcalls. We
6568 don't want to use TARGET for anything but the final result;
6569 intermediate values must go elsewhere. Additionally, calls to
6570 emit_block_move will be flagged with BLOCK_OP_CALL_PARM. */
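/* Example (illustrative only): a typical caller copes with TARGET and
   TMODE being mere suggestions by writing

     temp = expand_expr (exp, target, mode, EXPAND_NORMAL);
     if (temp != target)
       emit_move_insn (target, temp);
*/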
6573 expand_expr (tree exp, rtx target, enum machine_mode tmode, enum expand_modifier modifier)
6576 tree type = TREE_TYPE (exp);
6577 int unsignedp = TREE_UNSIGNED (type);
6578 enum machine_mode mode;
6579 enum tree_code code = TREE_CODE (exp);
6581 rtx subtarget, original_target;
6585 /* Handle ERROR_MARK before anybody tries to access its type. */
6586 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6588 op0 = CONST0_RTX (tmode);
6594 mode = TYPE_MODE (type);
6595 /* Use subtarget as the target for operand 0 of a binary operation. */
6596 subtarget = get_subtarget (target);
6597 original_target = target;
6598 ignore = (target == const0_rtx
6599 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6600 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6601 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6602 && TREE_CODE (type) == VOID_TYPE));
6604 /* If we are going to ignore this result, we need only do something
6605 if there is a side-effect somewhere in the expression. If there
6606 is, short-circuit the most common cases here. Note that we must
6607 not call expand_expr with anything but const0_rtx in case this
6608 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6612 if (! TREE_SIDE_EFFECTS (exp))
6615 /* Ensure we reference a volatile object even if value is ignored, but
6616 don't do this if all we are doing is taking its address. */
6617 if (TREE_THIS_VOLATILE (exp)
6618 && TREE_CODE (exp) != FUNCTION_DECL
6619 && mode != VOIDmode && mode != BLKmode
6620 && modifier != EXPAND_CONST_ADDRESS)
6622 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6623 if (GET_CODE (temp) == MEM)
6624 temp = copy_to_reg (temp);
6628 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6629 || code == INDIRECT_REF || code == BUFFER_REF)
6630 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6633 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6634 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6636 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6637 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6640 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6641 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6642 /* If the second operand has no side effects, just evaluate the first. */
6644 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6646 else if (code == BIT_FIELD_REF)
6648 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6649 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6650 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
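/* Illustration (an added note): for a statement like "a + b;" whose
   value is unused, the arms above expand A and B with const0_rtx as
   the target purely for their side effects, and the caller simply
   gets const0_rtx back.  */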
6657 #ifdef MAX_INTEGER_COMPUTATION_MODE
6658 /* Only check stuff here if the mode we want is different from the mode
6659 of the expression; if it's the same, check_max_integer_computation_mode
6660 will handle it. Do we really need to check this stuff at all? */
6663 && GET_MODE (target) != mode
6664 && TREE_CODE (exp) != INTEGER_CST
6665 && TREE_CODE (exp) != PARM_DECL
6666 && TREE_CODE (exp) != ARRAY_REF
6667 && TREE_CODE (exp) != ARRAY_RANGE_REF
6668 && TREE_CODE (exp) != COMPONENT_REF
6669 && TREE_CODE (exp) != BIT_FIELD_REF
6670 && TREE_CODE (exp) != INDIRECT_REF
6671 && TREE_CODE (exp) != CALL_EXPR
6672 && TREE_CODE (exp) != VAR_DECL
6673 && TREE_CODE (exp) != RTL_EXPR)
6675 enum machine_mode mode = GET_MODE (target);
6677 if (GET_MODE_CLASS (mode) == MODE_INT
6678 && mode > MAX_INTEGER_COMPUTATION_MODE)
6679 internal_error ("unsupported wide integer operation");
6683 && TREE_CODE (exp) != INTEGER_CST
6684 && TREE_CODE (exp) != PARM_DECL
6685 && TREE_CODE (exp) != ARRAY_REF
6686 && TREE_CODE (exp) != ARRAY_RANGE_REF
6687 && TREE_CODE (exp) != COMPONENT_REF
6688 && TREE_CODE (exp) != BIT_FIELD_REF
6689 && TREE_CODE (exp) != INDIRECT_REF
6690 && TREE_CODE (exp) != VAR_DECL
6691 && TREE_CODE (exp) != CALL_EXPR
6692 && TREE_CODE (exp) != RTL_EXPR
6693 && GET_MODE_CLASS (tmode) == MODE_INT
6694 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6695 internal_error ("unsupported wide integer operation");
6697 check_max_integer_computation_mode (exp);
6700 /* If we will do cse, generate all results into pseudo registers
6701 since 1) that allows cse to find more things
6702 and 2) otherwise cse could produce an insn the machine
6703 cannot support. An exception is a CONSTRUCTOR into a multi-word
6704 MEM: storing directly into the MEM is much more likely to be efficient.
6705 Another is a CALL_EXPR which must return in memory. */
6707 if (! cse_not_expected && mode != BLKmode && target
6708 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6709 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
6710 && ! (code == CALL_EXPR && aggregate_value_p (exp)))
6717 tree function = decl_function_context (exp);
6718 /* Labels in containing functions, or labels used from initializers, must be forced. */
6720 if (modifier == EXPAND_INITIALIZER
6721 || (function != current_function_decl
6722 && function != inline_function_decl
6724 temp = force_label_rtx (exp);
6726 temp = label_rtx (exp);
6728 temp = gen_rtx_MEM (FUNCTION_MODE, gen_rtx_LABEL_REF (Pmode, temp));
6729 if (function != current_function_decl
6730 && function != inline_function_decl && function != 0)
6731 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6736 if (!DECL_RTL_SET_P (exp))
6738 error_with_decl (exp, "prior parameter's size depends on `%s'");
6739 return CONST0_RTX (mode);
6742 /* ... fall through ... */
6745 /* If a static var's type was incomplete when the decl was written,
6746 but the type is complete now, lay out the decl now. */
6747 if (DECL_SIZE (exp) == 0
6748 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6749 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6750 layout_decl (exp, 0);
6752 /* ... fall through ... */
6756 if (DECL_RTL (exp) == 0)
6759 /* Ensure variable marked as used even if it doesn't go through
6760 a parser. If it hasn't been used yet, write out an external definition. */
6762 if (! TREE_USED (exp))
6764 assemble_external (exp);
6765 TREE_USED (exp) = 1;
6768 /* Show we haven't gotten RTL for this yet. */
6771 /* Handle variables inherited from containing functions. */
6772 context = decl_function_context (exp);
6774 /* We treat inline_function_decl as an alias for the current function
6775 because that is the inline function whose vars, types, etc.
6776 are being merged into the current function.
6777 See expand_inline_function. */
6779 if (context != 0 && context != current_function_decl
6780 && context != inline_function_decl
6781 /* If var is static, we don't need a static chain to access it. */
6782 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6783 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6787 /* Mark as non-local and addressable. */
6788 DECL_NONLOCAL (exp) = 1;
6789 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6791 (*lang_hooks.mark_addressable) (exp);
6792 if (GET_CODE (DECL_RTL (exp)) != MEM)
6794 addr = XEXP (DECL_RTL (exp), 0);
6795 if (GET_CODE (addr) == MEM)
6797 = replace_equiv_address (addr,
6798 fix_lexical_addr (XEXP (addr, 0), exp));
6800 addr = fix_lexical_addr (addr, exp);
6802 temp = replace_equiv_address (DECL_RTL (exp), addr);
6805 /* This is the case of an array whose size is to be determined
6806 from its initializer, while the initializer is still being parsed.
6809 else if (GET_CODE (DECL_RTL (exp)) == MEM
6810 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6811 temp = validize_mem (DECL_RTL (exp));
6813 /* If DECL_RTL is memory, we are in the normal case; if either
6814 the address is not valid, or it is not a register and -fforce-addr
6815 is specified, get the address into a register. */
6817 else if (GET_CODE (DECL_RTL (exp)) == MEM
6818 && modifier != EXPAND_CONST_ADDRESS
6819 && modifier != EXPAND_SUM
6820 && modifier != EXPAND_INITIALIZER
6821 && (! memory_address_p (DECL_MODE (exp),
6822 XEXP (DECL_RTL (exp), 0))
6824 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6825 temp = replace_equiv_address (DECL_RTL (exp),
6826 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6828 /* If we got something, return it. But first, set the alignment
6829 if the address is a register. */
6832 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6833 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6838 /* If the mode of DECL_RTL does not match that of the decl, it
6839 must be a promoted value. We return a SUBREG of the wanted mode,
6840 but mark it so that we know that it was already extended. */
6842 if (GET_CODE (DECL_RTL (exp)) == REG
6843 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6845 /* Get the signedness used for this variable. Ensure we get the
6846 same mode we got when the variable was declared. */
6847 if (GET_MODE (DECL_RTL (exp))
6848 != promote_mode (type, DECL_MODE (exp), &unsignedp,
6849 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6852 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6853 SUBREG_PROMOTED_VAR_P (temp) = 1;
6854 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6858 return DECL_RTL (exp);
6861 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6862 TREE_INT_CST_HIGH (exp), mode);
6864 /* ??? If overflow is set, fold will have done an incomplete job,
6865 which can result in (plus xx (const_int 0)), which can get
6866 simplified by validate_replace_rtx during virtual register
6867 instantiation, which can result in unrecognizable insns.
6868 Avoid this by forcing all overflows into registers. */
6869 if (TREE_CONSTANT_OVERFLOW (exp)
6870 && modifier != EXPAND_INITIALIZER)
6871 temp = force_reg (mode, temp);
6876 return const_vector_from_tree (exp);
6879 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6882 /* If optimized, generate immediate CONST_DOUBLE
6883 which will be turned into memory by reload if necessary.
6885 We used to force a register so that loop.c could see it. But
6886 this does not allow gen_* patterns to perform optimizations with
6887 the constants. It also produces two insns in cases like "x = 1.0;".
6888 On most machines, floating-point constants are not permitted in
6889 many insns, so we'd end up copying it to a register in any case.
6891 Now, we do the copying in expand_binop, if appropriate. */
6892 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6893 TYPE_MODE (TREE_TYPE (exp)));
6896 /* Handle evaluating a complex constant in a CONCAT target. */
6897 if (original_target && GET_CODE (original_target) == CONCAT)
6899 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6902 rtarg = XEXP (original_target, 0);
6903 itarg = XEXP (original_target, 1);
6905 /* Move the real and imaginary parts separately. */
6906 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
6907 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
6910 emit_move_insn (rtarg, op0);
6912 emit_move_insn (itarg, op1);
6914 return original_target;
6917 /* ... fall through ... */
6920 temp = output_constant_def (exp, 1);
6922 /* temp contains a constant address.
6923 On RISC machines where a constant address isn't valid,
6924 make some insns to get that address into a register. */
6925 if (modifier != EXPAND_CONST_ADDRESS
6926 && modifier != EXPAND_INITIALIZER
6927 && modifier != EXPAND_SUM
6928 && (! memory_address_p (mode, XEXP (temp, 0))
6929 || flag_force_addr))
6930 return replace_equiv_address (temp,
6931 copy_rtx (XEXP (temp, 0)));
6934 case EXPR_WITH_FILE_LOCATION:
6937 location_t saved_loc = input_location;
6938 input_filename = EXPR_WFL_FILENAME (exp);
6939 input_line = EXPR_WFL_LINENO (exp);
6940 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6941 emit_line_note (input_location);
6942 /* Possibly avoid switching back and forth here. */
6943 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6944 input_location = saved_loc;
6949 context = decl_function_context (exp);
6951 /* If this SAVE_EXPR was at global context, assume we are an
6952 initialization function and move it into our context. */
6954 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6956 /* We treat inline_function_decl as an alias for the current function
6957 because that is the inline function whose vars, types, etc.
6958 are being merged into the current function.
6959 See expand_inline_function. */
6960 if (context == current_function_decl || context == inline_function_decl)
6963 /* If this is non-local, handle it. */
6966 /* The following call just exists to abort if the context is
6967 not of a containing function. */
6968 find_function_data (context);
6970 temp = SAVE_EXPR_RTL (exp);
6971 if (temp && GET_CODE (temp) == REG)
6973 put_var_into_stack (exp, /*rescan=*/true);
6974 temp = SAVE_EXPR_RTL (exp);
6976 if (temp == 0 || GET_CODE (temp) != MEM)
6979 replace_equiv_address (temp,
6980 fix_lexical_addr (XEXP (temp, 0), exp));
6982 if (SAVE_EXPR_RTL (exp) == 0)
6984 if (mode == VOIDmode)
6987 temp = assign_temp (build_qualified_type (type,
6989 | TYPE_QUAL_CONST)),
6992 SAVE_EXPR_RTL (exp) = temp;
6993 if (!optimize && GET_CODE (temp) == REG)
6994 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6997 /* If the mode of TEMP does not match that of the expression, it
6998 must be a promoted value. We pass store_expr a SUBREG of the
6999 wanted mode but mark it so that we know that it was already extended. */
7002 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
7004 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
7005 promote_mode (type, mode, &unsignedp, 0);
7006 SUBREG_PROMOTED_VAR_P (temp) = 1;
7007 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
7010 if (temp == const0_rtx)
7011 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7013 store_expr (TREE_OPERAND (exp, 0), temp,
7014 modifier == EXPAND_STACK_PARM ? 2 : 0);
7016 TREE_USED (exp) = 1;
7019 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
7020 must be a promoted value. We return a SUBREG of the wanted mode,
7021 but mark it so that we know that it was already extended. */
7023 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
7024 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
7026 /* Compute the signedness and make the proper SUBREG. */
7027 promote_mode (type, mode, &unsignedp, 0);
7028 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
7029 SUBREG_PROMOTED_VAR_P (temp) = 1;
7030 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
7034 return SAVE_EXPR_RTL (exp);
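/* Added note: this is what makes a SAVE_EXPR evaluate exactly once.
   If one SAVE_EXPR node is shared, as in (SAVE_EXPR f()) * (SAVE_EXPR
   f()) built from a single node, the first expansion computes f ()
   into SAVE_EXPR_RTL and every later expansion returns that rtx.  */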
7039 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7040 TREE_OPERAND (exp, 0)
7041 = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
7045 case PLACEHOLDER_EXPR:
7047 tree old_list = placeholder_list;
7048 tree placeholder_expr = 0;
7050 exp = find_placeholder (exp, &placeholder_expr);
7054 placeholder_list = TREE_CHAIN (placeholder_expr);
7055 temp = expand_expr (exp, original_target, tmode, modifier);
7056 placeholder_list = old_list;
7060 case WITH_RECORD_EXPR:
7061 /* Put the object on the placeholder list, expand our first operand,
7062 and pop the list. */
7063 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
7065 target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
7067 placeholder_list = TREE_CHAIN (placeholder_list);
7071 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
7072 expand_goto (TREE_OPERAND (exp, 0));
7074 expand_computed_goto (TREE_OPERAND (exp, 0));
7078 expand_exit_loop_if_false (NULL,
7079 invert_truthvalue (TREE_OPERAND (exp, 0)));
7082 case LABELED_BLOCK_EXPR:
7083 if (LABELED_BLOCK_BODY (exp))
7084 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
7085 /* Should perhaps use expand_label, but this is simpler and safer. */
7086 do_pending_stack_adjust ();
7087 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
7090 case EXIT_BLOCK_EXPR:
7091 if (EXIT_BLOCK_RETURN (exp))
7092 sorry ("returned value in block_exit_expr");
7093 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
7098 expand_start_loop (1);
7099 expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
7107 tree vars = TREE_OPERAND (exp, 0);
7109 /* Need to open a binding contour here because
7110 if there are any cleanups they must be contained here. */
7111 expand_start_bindings (2);
7113 /* Mark the corresponding BLOCK for output in its proper place. */
7114 if (TREE_OPERAND (exp, 2) != 0
7115 && ! TREE_USED (TREE_OPERAND (exp, 2)))
7116 (*lang_hooks.decls.insert_block) (TREE_OPERAND (exp, 2));
7118 /* If VARS have not yet been expanded, expand them now. */
7121 if (!DECL_RTL_SET_P (vars))
7123 expand_decl_init (vars);
7124 vars = TREE_CHAIN (vars);
7127 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
7129 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
7135 if (RTL_EXPR_SEQUENCE (exp))
7137 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
7139 emit_insn (RTL_EXPR_SEQUENCE (exp));
7140 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
7142 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
7143 free_temps_for_rtl_expr (exp);
7144 return RTL_EXPR_RTL (exp);
7147 /* If we don't need the result, just ensure we evaluate any subexpressions. */
7153 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
7154 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
7159 /* All elts simple constants => refer to a constant in memory. But
7160 if this is a non-BLKmode mode, let it store a field at a time
7161 since that should make a CONST_INT or CONST_DOUBLE when we
7162 fold. Likewise, if we have a target we can use, it is best to
7163 store directly into the target unless the type is large enough
7164 that memcpy will be used. If we are making an initializer and
7165 all operands are constant, put it in memory as well.
7167 FIXME: Avoid trying to fill vector constructors piece-meal.
7168 Output them with output_constant_def below unless we're sure
7169 they're zeros. This should go away when vector initializers
7170 are treated like VECTOR_CST instead of arrays.
7172 else if ((TREE_STATIC (exp)
7173 && ((mode == BLKmode
7174 && ! (target != 0 && safe_from_p (target, exp, 1)))
7175 || TREE_ADDRESSABLE (exp)
7176 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7177 && (! MOVE_BY_PIECES_P
7178 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7180 && ((TREE_CODE (type) == VECTOR_TYPE
7181 && !is_zeros_p (exp))
7182 || ! mostly_zeros_p (exp)))))
7183 || ((modifier == EXPAND_INITIALIZER
7184 || modifier == EXPAND_CONST_ADDRESS)
7185 && TREE_CONSTANT (exp)))
7187 rtx constructor = output_constant_def (exp, 1);
7189 if (modifier != EXPAND_CONST_ADDRESS
7190 && modifier != EXPAND_INITIALIZER
7191 && modifier != EXPAND_SUM)
7192 constructor = validize_mem (constructor);
7198 /* Handle calls that pass values in multiple non-contiguous
7199 locations. The Irix 6 ABI has examples of this. */
7200 if (target == 0 || ! safe_from_p (target, exp, 1)
7201 || GET_CODE (target) == PARALLEL
7202 || modifier == EXPAND_STACK_PARM)
7204 = assign_temp (build_qualified_type (type,
7206 | (TREE_READONLY (exp)
7207 * TYPE_QUAL_CONST))),
7208 0, TREE_ADDRESSABLE (exp), 1);
7210 store_constructor (exp, target, 0, int_expr_size (exp));
7216 tree exp1 = TREE_OPERAND (exp, 0);
7218 tree string = string_constant (exp1, &index);
7220 /* Try to optimize reads from const strings. */
7222 && TREE_CODE (string) == STRING_CST
7223 && TREE_CODE (index) == INTEGER_CST
7224 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
7225 && GET_MODE_CLASS (mode) == MODE_INT
7226 && GET_MODE_SIZE (mode) == 1
7227 && modifier != EXPAND_WRITE)
7228 return gen_int_mode (TREE_STRING_POINTER (string)
7229 [TREE_INT_CST_LOW (index)], mode);
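/* Example of the shortcut above (added): reading *("abc" + 1) in
   QImode returns gen_int_mode ('b', QImode) directly, with no MEM
   emitted at all.  */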
7231 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
7232 op0 = memory_address (mode, op0);
7233 temp = gen_rtx_MEM (mode, op0);
7234 set_mem_attributes (temp, exp, 0);
7236 /* If we are writing to this object and its type is a record with
7237 readonly fields, we must mark it as readonly so it will
7238 conflict with readonly references to those fields. */
7239 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
7240 RTX_UNCHANGING_P (temp) = 1;
7246 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
7250 tree array = TREE_OPERAND (exp, 0);
7251 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
7252 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
7253 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
7256 /* Optimize the special-case of a zero lower bound.
7258 We convert the low_bound to sizetype to avoid some problems
7259 with constant folding. (E.g. suppose the lower bound is 1,
7260 and its mode is QI. Without the conversion, (ARRAY
7261 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
7262 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
7264 if (! integer_zerop (low_bound))
7265 index = size_diffop (index, convert (sizetype, low_bound));
7267 /* Fold an expression like: "foo"[2].
7268 This is not done in fold so it won't happen inside &.
7269 Don't fold if this is for wide characters since it's too
7270 difficult to do correctly and this is a very rare case. */
7272 if (modifier != EXPAND_CONST_ADDRESS
7273 && modifier != EXPAND_INITIALIZER
7274 && modifier != EXPAND_MEMORY
7275 && TREE_CODE (array) == STRING_CST
7276 && TREE_CODE (index) == INTEGER_CST
7277 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
7278 && GET_MODE_CLASS (mode) == MODE_INT
7279 && GET_MODE_SIZE (mode) == 1)
7280 return gen_int_mode (TREE_STRING_POINTER (array)
7281 [TREE_INT_CST_LOW (index)], mode);
7283 /* If this is a constant index into a constant array,
7284 just get the value from the array. Handle both the cases when
7285 we have an explicit constructor and when our operand is a variable
7286 that was declared const. */
7288 if (modifier != EXPAND_CONST_ADDRESS
7289 && modifier != EXPAND_INITIALIZER
7290 && modifier != EXPAND_MEMORY
7291 && TREE_CODE (array) == CONSTRUCTOR
7292 && ! TREE_SIDE_EFFECTS (array)
7293 && TREE_CODE (index) == INTEGER_CST
7294 && 0 > compare_tree_int (index,
7295 list_length (CONSTRUCTOR_ELTS
7296 (TREE_OPERAND (exp, 0)))))
7300 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7301 i = TREE_INT_CST_LOW (index);
7302 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
7306 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
7310 else if (optimize >= 1
7311 && modifier != EXPAND_CONST_ADDRESS
7312 && modifier != EXPAND_INITIALIZER
7313 && modifier != EXPAND_MEMORY
7314 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7315 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7316 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
7318 if (TREE_CODE (index) == INTEGER_CST)
7320 tree init = DECL_INITIAL (array);
7322 if (TREE_CODE (init) == CONSTRUCTOR)
7326 for (elem = CONSTRUCTOR_ELTS (init);
7328 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
7329 elem = TREE_CHAIN (elem))
7332 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
7333 return expand_expr (fold (TREE_VALUE (elem)), target,
7336 else if (TREE_CODE (init) == STRING_CST
7337 && 0 > compare_tree_int (index,
7338 TREE_STRING_LENGTH (init)))
7340 tree type = TREE_TYPE (TREE_TYPE (init));
7341 enum machine_mode mode = TYPE_MODE (type);
7343 if (GET_MODE_CLASS (mode) == MODE_INT
7344 && GET_MODE_SIZE (mode) == 1)
7345 return gen_int_mode (TREE_STRING_POINTER (init)
7346 [TREE_INT_CST_LOW (index)], mode);
7351 goto normal_inner_ref;
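/* Example of the folds above (added): with "static const int tbl[3]
   = {1, 2, 3};", the access tbl[1] expands to the constant 2 taken
   from DECL_INITIAL, and "foo"[2] becomes the character 'o', in both
   cases without touching memory.  */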
7354 /* If the operand is a CONSTRUCTOR, we can just extract the
7355 appropriate field if it is present. */
7356 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
7360 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
7361 elt = TREE_CHAIN (elt))
7362 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
7363 /* We can normally use the value of the field in the
7364 CONSTRUCTOR. However, if this is a bitfield in
7365 an integral mode that we can fit in a HOST_WIDE_INT,
7366 we must mask only the number of bits in the bitfield,
7367 since this is done implicitly by the constructor. If
7368 the bitfield does not meet either of those conditions,
7369 we can't do this optimization. */
7370 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
7371 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
7373 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
7374 <= HOST_BITS_PER_WIDE_INT))))
7376 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
7377 && modifier == EXPAND_STACK_PARM)
7379 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
7380 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
7382 HOST_WIDE_INT bitsize
7383 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
7384 enum machine_mode imode
7385 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
7387 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
7389 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7390 op0 = expand_and (imode, op0, op1, target);
7395 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
7398 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7400 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7408 goto normal_inner_ref;
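/* Example for the bitfield path above (added): a signed 3-bit field
   taken from a constructor value X held in SImode is normalized by
   (X << 29) >> 29 using arithmetic shifts, recreating the sign
   extension, while an unsigned field is simply masked with
   (1 << 3) - 1.  */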
7411 case ARRAY_RANGE_REF:
7414 enum machine_mode mode1;
7415 HOST_WIDE_INT bitsize, bitpos;
7418 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7419 &mode1, &unsignedp, &volatilep);
7422 /* If we got back the original object, something is wrong. Perhaps
7423 we are evaluating an expression too early. In any event, don't
7424 infinitely recurse. */
7428 /* If TEM's type is a union of variable size, pass TARGET to the inner
7429 computation, since it will need a temporary and TARGET is known
7430 to be suitable. This occurs in unchecked conversion in Ada. */
7434 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7435 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7437 && modifier != EXPAND_STACK_PARM
7438 ? target : NULL_RTX),
7440 (modifier == EXPAND_INITIALIZER
7441 || modifier == EXPAND_CONST_ADDRESS
7442 || modifier == EXPAND_STACK_PARM)
7443 ? modifier : EXPAND_NORMAL);
7445 /* If this is a constant, put it into a register if it is a
7446 legitimate constant and OFFSET is 0, and into memory if it isn't. */
7447 if (CONSTANT_P (op0))
7449 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7450 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7452 op0 = force_reg (mode, op0);
7454 op0 = validize_mem (force_const_mem (mode, op0));
7457 /* Otherwise, if this object is not in memory and we either have an
7458 offset or a BLKmode result, put it there. This case can't occur in
7459 C, but can in Ada if we have unchecked conversion of an expression
7460 from a scalar type to an array or record type or for an
7461 ARRAY_RANGE_REF whose type is BLKmode. */
7462 else if (GET_CODE (op0) != MEM
7464 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7466 /* If the operand is a SAVE_EXPR, we can deal with this by
7467 forcing the SAVE_EXPR into memory. */
7468 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7470 put_var_into_stack (TREE_OPERAND (exp, 0),
7472 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7477 = build_qualified_type (TREE_TYPE (tem),
7478 (TYPE_QUALS (TREE_TYPE (tem))
7479 | TYPE_QUAL_CONST));
7480 rtx memloc = assign_temp (nt, 1, 1, 1);
7482 emit_move_insn (memloc, op0);
7489 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7492 if (GET_CODE (op0) != MEM)
7495 #ifdef POINTERS_EXTEND_UNSIGNED
7496 if (GET_MODE (offset_rtx) != Pmode)
7497 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7499 if (GET_MODE (offset_rtx) != ptr_mode)
7500 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7503 /* A constant address in OP0 can have VOIDmode; we must not try
7504 to call force_reg in that case, so avoid it. */
7505 if (GET_CODE (op0) == MEM
7506 && GET_MODE (op0) == BLKmode
7507 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7509 && (bitpos % bitsize) == 0
7510 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7511 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7513 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7517 op0 = offset_address (op0, offset_rtx,
7518 highest_pow2_factor (offset));
7521 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7522 record its alignment as BIGGEST_ALIGNMENT. */
7523 if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
7524 && is_aligning_offset (offset, tem))
7525 set_mem_align (op0, BIGGEST_ALIGNMENT);
7527 /* Don't forget about volatility even if this is a bitfield. */
7528 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7530 if (op0 == orig_op0)
7531 op0 = copy_rtx (op0);
7533 MEM_VOLATILE_P (op0) = 1;
7536 /* The following code doesn't handle CONCAT.
7537 Assume only bitpos == 0 can be used for CONCAT, due to
7538 one-element arrays having the same mode as their element. */
7539 if (GET_CODE (op0) == CONCAT)
7541 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
7546 /* In cases where an aligned union has an unaligned object
7547 as a field, we might be extracting a BLKmode value from
7548 an integer-mode (e.g., SImode) object. Handle this case
7549 by doing the extract into an object as wide as the field
7550 (which we know to be the width of a basic mode), then
7551 storing into memory, and changing the mode to BLKmode. */
7552 if (mode1 == VOIDmode
7553 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7554 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7555 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7556 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7557 && modifier != EXPAND_CONST_ADDRESS
7558 && modifier != EXPAND_INITIALIZER)
7559 /* If the field isn't aligned enough to fetch as a memref,
7560 fetch it as a bit field. */
7561 || (mode1 != BLKmode
7562 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7563 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))
7564 && ((modifier == EXPAND_CONST_ADDRESS
7565 || modifier == EXPAND_INITIALIZER)
7567 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7568 || (bitpos % BITS_PER_UNIT != 0)))
7569 /* If the type and the field are a constant size and the
7570 size of the type isn't the same size as the bitfield,
7571 we must use bitfield operations. */
7573 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7575 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7578 enum machine_mode ext_mode = mode;
7580 if (ext_mode == BLKmode
7581 && ! (target != 0 && GET_CODE (op0) == MEM
7582 && GET_CODE (target) == MEM
7583 && bitpos % BITS_PER_UNIT == 0))
7584 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7586 if (ext_mode == BLKmode)
7588 /* In this case, BITPOS must start at a byte boundary and
7589 TARGET, if specified, must be a MEM. */
7590 if (GET_CODE (op0) != MEM
7591 || (target != 0 && GET_CODE (target) != MEM)
7592 || bitpos % BITS_PER_UNIT != 0)
7595 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
7597 target = assign_temp (type, 0, 1, 1);
7599 emit_block_move (target, op0,
7600 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7602 (modifier == EXPAND_STACK_PARM
7603 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7608 op0 = validize_mem (op0);
7610 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7611 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7613 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7614 (modifier == EXPAND_STACK_PARM
7615 ? NULL_RTX : target),
7617 int_size_in_bytes (TREE_TYPE (tem)));
7619 /* If the result is a record type and BITSIZE is narrower than
7620 the mode of OP0, an integral mode, and this is a big endian
7621 machine, we must put the field into the high-order bits. */
7622 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7623 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7624 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7625 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7626 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7630 if (mode == BLKmode)
7632 rtx new = assign_temp (build_qualified_type
7633 ((*lang_hooks.types.type_for_mode)
7635 TYPE_QUAL_CONST), 0, 1, 1);
7637 emit_move_insn (new, op0);
7638 op0 = copy_rtx (new);
7639 PUT_MODE (op0, BLKmode);
7640 set_mem_attributes (op0, exp, 1);
7646 /* If the result is BLKmode, use that to access the object out of memory. */
7648 if (mode == BLKmode)
7651 /* Get a reference to just this component. */
7652 if (modifier == EXPAND_CONST_ADDRESS
7653 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7654 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7656 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7658 if (op0 == orig_op0)
7659 op0 = copy_rtx (op0);
7661 set_mem_attributes (op0, exp, 0);
7662 if (GET_CODE (XEXP (op0, 0)) == REG)
7663 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7665 MEM_VOLATILE_P (op0) |= volatilep;
7666 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7667 || modifier == EXPAND_CONST_ADDRESS
7668 || modifier == EXPAND_INITIALIZER)
7670 else if (target == 0)
7671 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7673 convert_move (target, op0, unsignedp);
7679 rtx insn, before = get_last_insn (), vtbl_ref;
7681 /* Evaluate the interior expression. */
7682 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7685 /* Get or create an instruction off which to hang a note. */
7686 if (REG_P (subtarget))
7689 insn = get_last_insn ();
7692 if (! INSN_P (insn))
7693 insn = prev_nonnote_insn (insn);
7697 target = gen_reg_rtx (GET_MODE (subtarget));
7698 insn = emit_move_insn (target, subtarget);
7701 /* Collect the data for the note. */
7702 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7703 vtbl_ref = plus_constant (vtbl_ref,
7704 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7705 /* Discard the initial CONST that was added. */
7706 vtbl_ref = XEXP (vtbl_ref, 0);
7709 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7714 /* Intended for a reference to a buffer of a file-object in Pascal.
7715 But it's not certain that a special tree code will really be
7716 necessary for these. INDIRECT_REF might work for them. */
7722 /* Pascal set IN expression.
7725 rlo = set_low - (set_low%bits_per_word);
7726 the_word = set [ (index - rlo)/bits_per_word ];
7727 bit_index = index % bits_per_word;
7728 bitmask = 1 << bit_index;
7729 return !!(the_word & bitmask); */
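/* The same recipe in plain C, as an added sketch; the expansion below
   actually works in BITS_PER_UNIT-sized chunks, so "bits_per_word"
   is a byte here:

     int in_set (const unsigned char *set, long index, long set_low)
     {
       long rlo = set_low - set_low % 8;
       unsigned char the_word = set[(index - rlo) / 8];
       return (the_word >> index % 8) & 1;
     }
*/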
7731 tree set = TREE_OPERAND (exp, 0);
7732 tree index = TREE_OPERAND (exp, 1);
7733 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7734 tree set_type = TREE_TYPE (set);
7735 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7736 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7737 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7738 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7739 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7740 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7741 rtx setaddr = XEXP (setval, 0);
7742 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7744 rtx diff, quo, rem, addr, bit, result;
7746 /* If domain is empty, answer is no. Likewise if index is constant
7747 and out of bounds. */
7748 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7749 && TREE_CODE (set_low_bound) == INTEGER_CST
7750 && tree_int_cst_lt (set_high_bound, set_low_bound))
7751 || (TREE_CODE (index) == INTEGER_CST
7752 && TREE_CODE (set_low_bound) == INTEGER_CST
7753 && tree_int_cst_lt (index, set_low_bound))
7754 || (TREE_CODE (set_high_bound) == INTEGER_CST
7755 && TREE_CODE (index) == INTEGER_CST
7756 && tree_int_cst_lt (set_high_bound, index))))
7760 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7762 /* If we get here, we have to generate the code for both cases
7763 (in range and out of range). */
7765 op0 = gen_label_rtx ();
7766 op1 = gen_label_rtx ();
7768 if (! (GET_CODE (index_val) == CONST_INT
7769 && GET_CODE (lo_r) == CONST_INT))
7770 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7771 GET_MODE (index_val), iunsignedp, op1);
7773 if (! (GET_CODE (index_val) == CONST_INT
7774 && GET_CODE (hi_r) == CONST_INT))
7775 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7776 GET_MODE (index_val), iunsignedp, op1);
7778 /* Calculate the element number of bit zero in the first word of the set. */
7780 if (GET_CODE (lo_r) == CONST_INT)
7781 rlow = GEN_INT (INTVAL (lo_r)
7782 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7784 rlow = expand_binop (index_mode, and_optab, lo_r,
7785 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7786 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7788 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7789 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7791 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7792 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7793 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7794 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7796 addr = memory_address (byte_mode,
7797 expand_binop (index_mode, add_optab, diff,
7798 setaddr, NULL_RTX, iunsignedp,
7801 /* Extract the bit we want to examine. */
7802 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7803 gen_rtx_MEM (byte_mode, addr),
7804 make_tree (TREE_TYPE (index), rem),
7806 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7807 GET_MODE (target) == byte_mode ? target : 0,
7808 1, OPTAB_LIB_WIDEN);
7810 if (result != target)
7811 convert_move (target, result, 1);
7813 /* Output the code to handle the out-of-range case. */
7816 emit_move_insn (target, const0_rtx);
7821 case WITH_CLEANUP_EXPR:
7822 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7824 WITH_CLEANUP_EXPR_RTL (exp)
7825 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7826 expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
7827 CLEANUP_EH_ONLY (exp));
7829 /* That's it for this cleanup. */
7830 TREE_OPERAND (exp, 1) = 0;
7832 return WITH_CLEANUP_EXPR_RTL (exp);
7834 case CLEANUP_POINT_EXPR:
7836 /* Start a new binding layer that will keep track of all cleanup
7837 actions to be performed. */
7838 expand_start_bindings (2);
7840 target_temp_slot_level = temp_slot_level;
7842 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7843 /* If we're going to use this value, load it up now. */
7844 if (! ignore)
7845 op0 = force_not_mem (op0);
7846 preserve_temp_slots (op0);
7847 expand_end_bindings (NULL_TREE, 0, 0);
7852 /* Check for a built-in function. */
7853 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7854 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7856 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7858 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7859 == BUILT_IN_FRONTEND)
7860 return (*lang_hooks.expand_expr) (exp, original_target,
7863 return expand_builtin (exp, target, subtarget, tmode, ignore);
7866 return expand_call (exp, target, ignore);
7868 case NON_LVALUE_EXPR:
7871 case REFERENCE_EXPR:
7872 if (TREE_OPERAND (exp, 0) == error_mark_node)
7875 if (TREE_CODE (type) == UNION_TYPE)
7877 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7879 /* If both input and output are BLKmode, this conversion isn't doing
7880 anything except possibly changing memory attributes. */
7881 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7883 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7886 result = copy_rtx (result);
7887 set_mem_attributes (result, exp, 0);
7892 target = assign_temp (type, 0, 1, 1);
7894 if (GET_CODE (target) == MEM)
7895 /* Store data into beginning of memory target. */
7896 store_expr (TREE_OPERAND (exp, 0),
7897 adjust_address (target, TYPE_MODE (valtype), 0),
7898 modifier == EXPAND_STACK_PARM ? 2 : 0);
7900 else if (GET_CODE (target) == REG)
7901 /* Store this field into a union of the proper type. */
7902 store_field (target,
7903 MIN ((int_size_in_bytes (TREE_TYPE
7904 (TREE_OPERAND (exp, 0)))
7906 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7907 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7908 VOIDmode, 0, type, 0);
7912 /* Return the entire union. */
7916 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7918 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7921 /* If the signedness of the conversion differs and OP0 is
7922 a promoted SUBREG, clear that indication since we now
7923 have to do the proper extension. */
7924 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7925 && GET_CODE (op0) == SUBREG)
7926 SUBREG_PROMOTED_VAR_P (op0) = 0;
7931 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7932 if (GET_MODE (op0) == mode)
7935 /* If OP0 is a constant, just convert it into the proper mode. */
7936 if (CONSTANT_P (op0))
7938 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7939 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7941 if (modifier == EXPAND_INITIALIZER)
7942 return simplify_gen_subreg (mode, op0, inner_mode,
7943 subreg_lowpart_offset (mode,
7946 return convert_modes (mode, inner_mode, op0,
7947 TREE_UNSIGNED (inner_type));
7950 if (modifier == EXPAND_INITIALIZER)
7951 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7955 convert_to_mode (mode, op0,
7956 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7958 convert_move (target, op0,
7959 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7962 case VIEW_CONVERT_EXPR:
7963 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7965 /* If the input and output modes are both the same, we are done.
7966 Otherwise, if neither mode is BLKmode and both are integral and within
7967 a word, we can use gen_lowpart. If neither is true, make sure the
7968 operand is in memory and convert the MEM to the new mode. */
7969 if (TYPE_MODE (type) == GET_MODE (op0))
7971 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7972 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7973 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
7974 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7975 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7976 op0 = gen_lowpart (TYPE_MODE (type), op0);
7977 else if (GET_CODE (op0) != MEM)
7979 /* If the operand is not a MEM, force it into memory. Since we
7980 are going to be changing the mode of the MEM, don't call
7981 force_const_mem for constants because we don't allow pool
7982 constants to change mode. */
7983 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7985 if (TREE_ADDRESSABLE (exp))
7988 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7990 = assign_stack_temp_for_type
7991 (TYPE_MODE (inner_type),
7992 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7994 emit_move_insn (target, op0);
7998 /* At this point, OP0 is in the correct mode. If the output type is such
7999 that the operand is known to be aligned, indicate that it is.
8000 Otherwise, we need only be concerned about alignment for non-BLKmode results. */
8002 if (GET_CODE (op0) == MEM)
8004 op0 = copy_rtx (op0);
8006 if (TYPE_ALIGN_OK (type))
8007 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
8008 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
8009 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
8011 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8012 HOST_WIDE_INT temp_size
8013 = MAX (int_size_in_bytes (inner_type),
8014 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
8015 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
8016 temp_size, 0, type);
8017 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
8019 if (TREE_ADDRESSABLE (exp))
8022 if (GET_MODE (op0) == BLKmode)
8023 emit_block_move (new_with_op0_mode, op0,
8024 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
8025 (modifier == EXPAND_STACK_PARM
8026 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
8028 emit_move_insn (new_with_op0_mode, op0);
8033 op0 = adjust_address (op0, TYPE_MODE (type), 0);
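/* Illustration (added): VIEW_CONVERT_EXPR<int>(some_float) reuses the
   bits of the operand in a new mode.  Since SFmode is not an integral
   mode, the gen_lowpart shortcut above does not apply; the value is
   forced into memory and the MEM is re-read in SImode.  The shortcut
   covers integral cases such as viewing an enum as an int.  */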
8039 this_optab = ! unsignedp && flag_trapv
8040 && (GET_MODE_CLASS (mode) == MODE_INT)
8041 ? addv_optab : add_optab;
8043 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
8044 something else, make sure we add the register to the constant and
8045 then to the other thing. This case can occur during strength
8046 reduction and doing it this way will produce better code if the
8047 frame pointer or argument pointer is eliminated.
8049 fold-const.c will ensure that the constant is always in the inner
8050 PLUS_EXPR, so the only case we need to do anything about is if
8051 sp, ap, or fp is our second argument, in which case we must swap
8052 the innermost first argument and our second argument. */
8054 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
8055 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
8056 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
8057 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
8058 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
8059 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
8061 tree t = TREE_OPERAND (exp, 1);
8063 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
8064 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
8067 /* If the result is to be ptr_mode and we are adding an integer to
8068 something, we might be forming a constant. So try to use
8069 plus_constant. If it produces a sum and we can't accept it,
8070 use force_operand. This allows P = &ARR[const] to generate
8071 efficient code on machines where a SYMBOL_REF is not a valid address.
8074 If this is an EXPAND_SUM call, always return the sum. */
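/* Example (added): expanding &arr[10] for an initializer as arr + 40
   lets plus_constant fold the offset into
   (const (plus (symbol_ref "arr") (const_int 40))), which can be
   emitted into the data section as-is.  */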
8075 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8076 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8078 if (modifier == EXPAND_STACK_PARM)
8080 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
8081 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8082 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
8086 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
8088 /* Use immed_double_const to ensure that the constant is
8089 truncated according to the mode of OP1, then sign extended
8090 to a HOST_WIDE_INT. Using the constant directly can result
8091 in non-canonical RTL in a 64x32 cross compile. */
8093 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
8095 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
8096 op1 = plus_constant (op1, INTVAL (constant_part));
8097 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8098 op1 = force_operand (op1, target);
8102 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8103 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
8104 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
8108 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8109 (modifier == EXPAND_INITIALIZER
8110 ? EXPAND_INITIALIZER : EXPAND_SUM));
8111 if (! CONSTANT_P (op0))
8113 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8114 VOIDmode, modifier);
8115 /* Don't go to both_summands if modifier
8116 says it's not right to return a PLUS. */
8117 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8121 /* Use immed_double_const to ensure that the constant is
8122 truncated according to the mode of OP1, then sign extended
8123 to a HOST_WIDE_INT. Using the constant directly can result
8124 in non-canonical RTL in a 64x32 cross compile. */
8126 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
8128 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
8129 op0 = plus_constant (op0, INTVAL (constant_part));
8130 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8131 op0 = force_operand (op0, target);
8136 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8139 /* No sense saving up arithmetic to be done
8140 if it's all in the wrong mode to form part of an address.
8141 And force_operand won't know whether to sign-extend or zero-extend. */
8143 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8144 || mode != ptr_mode)
8146 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8147 if (! operand_equal_p (TREE_OPERAND (exp, 0),
8148 TREE_OPERAND (exp, 1), 0))
8149 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8152 if (op0 == const0_rtx)
8154 if (op1 == const0_rtx)
8159 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
8160 if (! operand_equal_p (TREE_OPERAND (exp, 0),
8161 TREE_OPERAND (exp, 1), 0))
8162 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8163 VOIDmode, modifier);
8167 /* We come here from MINUS_EXPR when the second operand is a constant. */
8170 /* Make sure any term that's a sum with a constant comes last. */
8171 if (GET_CODE (op0) == PLUS
8172 && CONSTANT_P (XEXP (op0, 1)))
8178 /* If adding to a sum including a constant,
8179 associate it to put the constant outside. */
8180 if (GET_CODE (op1) == PLUS
8181 && CONSTANT_P (XEXP (op1, 1)))
8183 rtx constant_term = const0_rtx;
8185 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
8188 /* Ensure that MULT comes first if there is one. */
8189 else if (GET_CODE (op0) == MULT)
8190 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
8192 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
8194 /* Let's also eliminate constants from op0 if possible. */
8195 op0 = eliminate_constant_term (op0, &constant_term);
8197 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
8198 their sum should be a constant. Form it into OP1, since the
8199 result we want will then be OP0 + OP1. */
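/* For example, adding (plus (reg A) (const_int 4)) and
(plus (reg B) (const_int 6)) reassociates here to
(plus (plus (reg A) (reg B)) (const_int 10)), leaving the single
constant outermost where later passes can see it.  */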
8201 temp = simplify_binary_operation (PLUS, mode, constant_term,
8206 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
8209 /* Put a constant term last and put a multiplication first. */
8210 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
8211 temp = op1, op1 = op0, op0 = temp;
8213 temp = simplify_binary_operation (PLUS, mode, op0, op1);
8214 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
/* For initializers, we are allowed to return a MINUS of two
symbolic constants.  Here we handle all cases when both operands
are constant.  */
8220 /* Handle difference of two symbolic constants,
8221 for the sake of an initializer. */
8222 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8223 && really_constant_p (TREE_OPERAND (exp, 0))
8224 && really_constant_p (TREE_OPERAND (exp, 1)))
8226 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
8228 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
8231 /* If the last operand is a CONST_INT, use plus_constant of
8232 the negated constant. Else make the MINUS. */
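/* E.g. subtracting (const_int 4) from (symbol_ref "x") becomes
plus_constant (op0, -4), i.e. (const (plus (symbol_ref "x")
(const_int -4))), keeping the canonical PLUS form.  */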
8233 if (GET_CODE (op1) == CONST_INT)
8234 return plus_constant (op0, - INTVAL (op1));
8236 return gen_rtx_MINUS (mode, op0, op1);
8239 this_optab = ! unsignedp && flag_trapv
8240 && (GET_MODE_CLASS(mode) == MODE_INT)
8241 ? subv_optab : sub_optab;
/* No sense saving up arithmetic to be done
if it's all in the wrong mode to form part of an address.
And force_operand won't know whether to sign-extend or
zero-extend.  */
8247 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8248 || mode != ptr_mode)
8251 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8254 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
8255 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
8257 /* Convert A - const to A + (-const). */
8258 if (GET_CODE (op1) == CONST_INT)
8260 op1 = negate_rtx (mode, op1);
8267 /* If first operand is constant, swap them.
8268 Thus the following special case checks need only
8269 check the second operand. */
8270 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
8272 tree t1 = TREE_OPERAND (exp, 0);
8273 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
8274 TREE_OPERAND (exp, 1) = t1;
8277 /* Attempt to return something suitable for generating an
8278 indexed address, for machines that support that. */
8280 if (modifier == EXPAND_SUM && mode == ptr_mode
8281 && host_integerp (TREE_OPERAND (exp, 1), 0))
8283 tree exp1 = TREE_OPERAND (exp, 1);
8285 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8288 /* If we knew for certain that this is arithmetic for an array
8289 reference, and we knew the bounds of the array, then we could
8290 apply the distributive law across (PLUS X C) for constant C.
8291 Without such knowledge, we risk overflowing the computation
8292 when both X and C are large, but X+C isn't. */
8293 /* ??? Could perhaps special-case EXP being unsigned and C being
8294 positive. In that case we are certain that X+C is no smaller
8295 than X and so the transformed expression will overflow iff the
8296 original would have. */
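/* For instance, x = 0x7fffffff and c = -0x7ffffffe give x + c = 1,
so (x + c) * s is small, while the distributed form computes x * s
and c * s, each of which can overflow.  Hence the caution.  */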
8298 if (GET_CODE (op0) != REG)
8299 op0 = force_operand (op0, NULL_RTX);
8300 if (GET_CODE (op0) != REG)
8301 op0 = copy_to_mode_reg (mode, op0);
8303 return gen_rtx_MULT (mode, op0,
8304 gen_int_mode (tree_low_cst (exp1, 0),
8305 TYPE_MODE (TREE_TYPE (exp1))));
8308 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8311 if (modifier == EXPAND_STACK_PARM)
8314 /* Check for multiplying things that have been extended
8315 from a narrower type. If this machine supports multiplying
8316 in that narrower type with a result in the desired type,
8317 do it that way, and avoid the explicit type-conversion. */
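/* For example, "short a, b; ... (int) a * (int) b" on a 32-bit
target with a widening multiply pattern (mulhisi3 on many machines)
multiplies the two HImode values directly into an SImode result,
with no explicit extensions.  */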
8318 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
8319 && TREE_CODE (type) == INTEGER_TYPE
8320 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8321 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8322 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8323 && int_fits_type_p (TREE_OPERAND (exp, 1),
8324 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8325 /* Don't use a widening multiply if a shift will do. */
8326 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8327 > HOST_BITS_PER_WIDE_INT)
8328 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
8330 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8331 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8333 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
8334 /* If both operands are extended, they must either both
8335 be zero-extended or both be sign-extended. */
8336 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8338 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
8340 enum machine_mode innermode
8341 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
8342 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8343 ? smul_widen_optab : umul_widen_optab);
8344 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8345 ? umul_widen_optab : smul_widen_optab);
8346 if (mode == GET_MODE_WIDER_MODE (innermode))
8348 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
8350 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8351 NULL_RTX, VOIDmode, 0);
8352 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8353 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8356 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8357 NULL_RTX, VOIDmode, 0);
8360 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
8361 && innermode == word_mode)
8364 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8365 NULL_RTX, VOIDmode, 0);
8366 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8367 op1 = convert_modes (innermode, mode,
8368 expand_expr (TREE_OPERAND (exp, 1),
8369 NULL_RTX, VOIDmode, 0),
8372 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8373 NULL_RTX, VOIDmode, 0);
8374 temp = expand_binop (mode, other_optab, op0, op1, target,
8375 unsignedp, OPTAB_LIB_WIDEN);
8376 htem = expand_mult_highpart_adjust (innermode,
8377 gen_highpart (innermode, temp),
8379 gen_highpart (innermode, temp),
8381 emit_move_insn (gen_highpart (innermode, temp), htem);
8386 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8387 if (! operand_equal_p (TREE_OPERAND (exp, 0),
8388 TREE_OPERAND (exp, 1), 0))
8389 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8392 return expand_mult (mode, op0, op1, target, unsignedp);
8394 case TRUNC_DIV_EXPR:
8395 case FLOOR_DIV_EXPR:
8397 case ROUND_DIV_EXPR:
8398 case EXACT_DIV_EXPR:
8399 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8401 if (modifier == EXPAND_STACK_PARM)
/* Possible optimization: compute the dividend with EXPAND_SUM
and then, if the divisor is constant, optimize the case
where some terms of the dividend have coefficients divisible by it.  */
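/* E.g. (4*a + 8*b) / 4 could then be simplified to a + 2*b; the
code below does not currently attempt this.  */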
8406 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8407 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8408 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
/* Emit a/b as a*(1/b).  Later we may be able to CSE the reciprocal,
saving an expensive divide.  If not, combine will rebuild the
original computation.  */
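/* E.g. with -funsafe-math-optimizations, "x/d + y/d" becomes
"x*(1/d) + y*(1/d)", and cse can then compute 1/d once, replacing
two divides with one divide and two multiplies.  */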
8414 if (flag_unsafe_math_optimizations && optimize && !optimize_size
8415 && TREE_CODE (type) == REAL_TYPE
8416 && !real_onep (TREE_OPERAND (exp, 0)))
8417 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
8418 build (RDIV_EXPR, type,
8419 build_real (type, dconst1),
8420 TREE_OPERAND (exp, 1))),
8421 target, tmode, modifier);
8422 this_optab = sdiv_optab;
8425 case TRUNC_MOD_EXPR:
8426 case FLOOR_MOD_EXPR:
8428 case ROUND_MOD_EXPR:
8429 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8431 if (modifier == EXPAND_STACK_PARM)
8433 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8434 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8435 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8437 case FIX_ROUND_EXPR:
8438 case FIX_FLOOR_EXPR:
8440 abort (); /* Not used for C. */
8442 case FIX_TRUNC_EXPR:
8443 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8444 if (target == 0 || modifier == EXPAND_STACK_PARM)
8445 target = gen_reg_rtx (mode);
8446 expand_fix (target, op0, unsignedp);
8450 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8451 if (target == 0 || modifier == EXPAND_STACK_PARM)
8452 target = gen_reg_rtx (mode);
8453 /* expand_float can't figure out what to do if FROM has VOIDmode.
8454 So give it the correct mode. With -O, cse will optimize this. */
8455 if (GET_MODE (op0) == VOIDmode)
8456 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8458 expand_float (target, op0,
8459 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8463 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8464 if (modifier == EXPAND_STACK_PARM)
8466 temp = expand_unop (mode,
8467 ! unsignedp && flag_trapv
8468 && (GET_MODE_CLASS(mode) == MODE_INT)
8469 ? negv_optab : neg_optab, op0, target, 0);
8475 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8476 if (modifier == EXPAND_STACK_PARM)
8479 /* Handle complex values specially. */
8480 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
8481 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
8482 return expand_complex_abs (mode, op0, target, unsignedp);
8484 /* Unsigned abs is simply the operand. Testing here means we don't
8485 risk generating incorrect code below. */
8486 if (TREE_UNSIGNED (type))
8489 return expand_abs (mode, op0, target, unsignedp,
8490 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8494 target = original_target;
8496 || modifier == EXPAND_STACK_PARM
8497 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
8498 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
8499 || GET_MODE (target) != mode
8500 || (GET_CODE (target) == REG
8501 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8502 target = gen_reg_rtx (mode);
8503 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8504 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
/* First try to do it with a special MIN or MAX instruction.
If that does not win, use a conditional jump to select the proper
value.  */
8509 this_optab = (TREE_UNSIGNED (type)
8510 ? (code == MIN_EXPR ? umin_optab : umax_optab)
8511 : (code == MIN_EXPR ? smin_optab : smax_optab));
8513 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
/* At this point, a MEM target is no longer useful; we will get better
code without it.  */
8521 if (GET_CODE (target) == MEM)
8522 target = gen_reg_rtx (mode);
8525 emit_move_insn (target, op0);
8527 op0 = gen_label_rtx ();
8529 /* If this mode is an integer too wide to compare properly,
8530 compare word by word. Rely on cse to optimize constant cases. */
8531 if (GET_MODE_CLASS (mode) == MODE_INT
8532 && ! can_compare_p (GE, mode, ccp_jump))
8534 if (code == MAX_EXPR)
8535 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8536 target, op1, NULL_RTX, op0);
8538 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8539 op1, target, NULL_RTX, op0);
8543 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
8544 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
8545 unsignedp, mode, NULL_RTX, NULL_RTX,
8548 emit_move_insn (target, op1);
8553 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8554 if (modifier == EXPAND_STACK_PARM)
8556 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8562 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8563 if (modifier == EXPAND_STACK_PARM)
8565 temp = expand_unop (mode, ffs_optab, op0, target, 1);
8571 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8572 temp = expand_unop (mode, clz_optab, op0, target, 1);
8578 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8579 temp = expand_unop (mode, ctz_optab, op0, target, 1);
8585 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8586 temp = expand_unop (mode, popcount_optab, op0, target, 1);
8592 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8593 temp = expand_unop (mode, parity_optab, op0, target, 1);
8598 /* ??? Can optimize bitwise operations with one arg constant.
8599 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8600 and (a bitwise1 b) bitwise2 b (etc)
but that is probably not worthwhile.  */
8603 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8604 boolean values when we want in all cases to compute both of them. In
8605 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8606 as actual zero-or-1 values and then bitwise anding. In cases where
8607 there cannot be any side effects, better code would be made by
8608 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8609 how to recognize those cases. */
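/* E.g. for "c = a && b" where A and B are known zero-or-one values
without side effects, we simply compute both and emit one bitwise
AND, avoiding the branches TRUTH_ANDIF_EXPR would generate.  */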
8611 case TRUTH_AND_EXPR:
8613 this_optab = and_optab;
8618 this_optab = ior_optab;
8621 case TRUTH_XOR_EXPR:
8623 this_optab = xor_optab;
8630 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8632 if (modifier == EXPAND_STACK_PARM)
8634 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8635 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8638 /* Could determine the answer when only additive constants differ. Also,
8639 the addition of one can be handled by changing the condition. */
8646 case UNORDERED_EXPR:
8653 temp = do_store_flag (exp,
8654 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8655 tmode != VOIDmode ? tmode : mode, 0);
8659 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8660 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8662 && GET_CODE (original_target) == REG
8663 && (GET_MODE (original_target)
8664 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8666 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8669 /* If temp is constant, we can just compute the result. */
8670 if (GET_CODE (temp) == CONST_INT)
8672 if (INTVAL (temp) != 0)
8673 emit_move_insn (target, const1_rtx);
8675 emit_move_insn (target, const0_rtx);
8680 if (temp != original_target)
8682 enum machine_mode mode1 = GET_MODE (temp);
8683 if (mode1 == VOIDmode)
8684 mode1 = tmode != VOIDmode ? tmode : mode;
8686 temp = copy_to_mode_reg (mode1, temp);
8689 op1 = gen_label_rtx ();
8690 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8691 GET_MODE (temp), unsignedp, op1);
8692 emit_move_insn (temp, const1_rtx);
8697 /* If no set-flag instruction, must generate a conditional
8698 store into a temporary variable. Drop through
8699 and handle this like && and ||. */
8701 case TRUTH_ANDIF_EXPR:
8702 case TRUTH_ORIF_EXPR:
8705 || modifier == EXPAND_STACK_PARM
8706 || ! safe_from_p (target, exp, 1)
8707 /* Make sure we don't have a hard reg (such as function's return
8708 value) live across basic blocks, if not optimizing. */
8709 || (!optimize && GET_CODE (target) == REG
8710 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8711 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8714 emit_clr_insn (target);
8716 op1 = gen_label_rtx ();
8717 jumpifnot (exp, op1);
8720 emit_0_to_1_insn (target);
8723 return ignore ? const0_rtx : target;
8725 case TRUTH_NOT_EXPR:
8726 if (modifier == EXPAND_STACK_PARM)
8728 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8729 /* The parser is careful to generate TRUTH_NOT_EXPR
8730 only with operands that are always zero or one. */
8731 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8732 target, 1, OPTAB_LIB_WIDEN);
8738 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8740 return expand_expr (TREE_OPERAND (exp, 1),
8741 (ignore ? const0_rtx : target),
8742 VOIDmode, modifier);
8745 /* If we would have a "singleton" (see below) were it not for a
8746 conversion in each arm, bring that conversion back out. */
8747 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8748 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8749 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8750 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8752 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8753 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8755 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8756 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8757 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8758 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8759 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8760 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8761 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8762 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8763 return expand_expr (build1 (NOP_EXPR, type,
8764 build (COND_EXPR, TREE_TYPE (iftrue),
8765 TREE_OPERAND (exp, 0),
8767 target, tmode, modifier);
8771 /* Note that COND_EXPRs whose type is a structure or union
8772 are required to be constructed to contain assignments of
8773 a temporary variable, so that we can evaluate them here
8774 for side effect only. If type is void, we must do likewise. */
8776 /* If an arm of the branch requires a cleanup,
8777 only that cleanup is performed. */
8780 tree binary_op = 0, unary_op = 0;
8782 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8783 convert it to our mode, if necessary. */
8784 if (integer_onep (TREE_OPERAND (exp, 1))
8785 && integer_zerop (TREE_OPERAND (exp, 2))
8786 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8790 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8795 if (modifier == EXPAND_STACK_PARM)
8797 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8798 if (GET_MODE (op0) == mode)
8802 target = gen_reg_rtx (mode);
8803 convert_move (target, op0, unsignedp);
8807 /* Check for X ? A + B : A. If we have this, we can copy A to the
8808 output and conditionally add B. Similarly for unary operations.
8809 Don't do this if X has side-effects because those side effects
8810 might affect A or B and the "?" operation is a sequence point in
8811 ANSI. (operand_equal_p tests for side effects.) */
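/* E.g. for "x ? a + 8 : a" we store A into the output once and make
only the addition of 8 conditional, instead of evaluating A on both
arms.  */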
8813 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8814 && operand_equal_p (TREE_OPERAND (exp, 2),
8815 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8816 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8817 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8818 && operand_equal_p (TREE_OPERAND (exp, 1),
8819 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8820 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8821 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8822 && operand_equal_p (TREE_OPERAND (exp, 2),
8823 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8824 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8825 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8826 && operand_equal_p (TREE_OPERAND (exp, 1),
8827 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8828 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
/* If we are not to produce a result, we have no target.  Otherwise,
if a target was specified use it; it will not be used as an
intermediate target unless it is safe.  If no target, use a
temporary.  */
8837 else if (modifier == EXPAND_STACK_PARM)
8838 temp = assign_temp (type, 0, 0, 1);
8839 else if (original_target
8840 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8841 || (singleton && GET_CODE (original_target) == REG
8842 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8843 && original_target == var_rtx (singleton)))
8844 && GET_MODE (original_target) == mode
8845 #ifdef HAVE_conditional_move
8846 && (! can_conditionally_move_p (mode)
8847 || GET_CODE (original_target) == REG
8848 || TREE_ADDRESSABLE (type))
8850 && (GET_CODE (original_target) != MEM
8851 || TREE_ADDRESSABLE (type)))
8852 temp = original_target;
8853 else if (TREE_ADDRESSABLE (type))
8856 temp = assign_temp (type, 0, 0, 1);
8858 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8859 do the test of X as a store-flag operation, do this as
8860 A + ((X != 0) << log C). Similarly for other simple binary
8861 operators. Only do for C == 1 if BRANCH_COST is low. */
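/* E.g. "x ? a + 4 : a" can become "a + ((x != 0) << 2)" when a
store-flag insn is available: no branch at all, just a setcc, a
shift and an add.  */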
8862 if (temp && singleton && binary_op
8863 && (TREE_CODE (binary_op) == PLUS_EXPR
8864 || TREE_CODE (binary_op) == MINUS_EXPR
8865 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8866 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8867 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8868 : integer_onep (TREE_OPERAND (binary_op, 1)))
8869 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8873 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8874 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8875 ? addv_optab : add_optab)
8876 : TREE_CODE (binary_op) == MINUS_EXPR
8877 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8878 ? subv_optab : sub_optab)
8879 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8882 /* If we had X ? A : A + 1, do this as A + (X == 0). */
8883 if (singleton == TREE_OPERAND (exp, 1))
8884 cond = invert_truthvalue (TREE_OPERAND (exp, 0));
8886 cond = TREE_OPERAND (exp, 0);
8888 result = do_store_flag (cond, (safe_from_p (temp, singleton, 1)
8890 mode, BRANCH_COST <= 1);
8892 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8893 result = expand_shift (LSHIFT_EXPR, mode, result,
8894 build_int_2 (tree_log2
8898 (safe_from_p (temp, singleton, 1)
8899 ? temp : NULL_RTX), 0);
8903 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8904 return expand_binop (mode, boptab, op1, result, temp,
8905 unsignedp, OPTAB_LIB_WIDEN);
8909 do_pending_stack_adjust ();
8911 op0 = gen_label_rtx ();
8913 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8917 /* If the target conflicts with the other operand of the
8918 binary op, we can't use it. Also, we can't use the target
8919 if it is a hard register, because evaluating the condition
8920 might clobber it. */
8922 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8923 || (GET_CODE (temp) == REG
8924 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8925 temp = gen_reg_rtx (mode);
8926 store_expr (singleton, temp,
8927 modifier == EXPAND_STACK_PARM ? 2 : 0);
8930 expand_expr (singleton,
8931 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8932 if (singleton == TREE_OPERAND (exp, 1))
8933 jumpif (TREE_OPERAND (exp, 0), op0);
8935 jumpifnot (TREE_OPERAND (exp, 0), op0);
8937 start_cleanup_deferral ();
8938 if (binary_op && temp == 0)
8939 /* Just touch the other operand. */
8940 expand_expr (TREE_OPERAND (binary_op, 1),
8941 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8943 store_expr (build (TREE_CODE (binary_op), type,
8944 make_tree (type, temp),
8945 TREE_OPERAND (binary_op, 1)),
8946 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8948 store_expr (build1 (TREE_CODE (unary_op), type,
8949 make_tree (type, temp)),
8950 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8953 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8954 comparison operator. If we have one of these cases, set the
8955 output to A, branch on A (cse will merge these two references),
8956 then set the output to FOO. */
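/* E.g. for "x > 0 ? x : y" we store X into the output, branch on
the comparison (cse merges the two references to X), and store Y
only on the fall-through path.  */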
8958 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8959 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8960 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8961 TREE_OPERAND (exp, 1), 0)
8962 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8963 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8964 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8966 if (GET_CODE (temp) == REG
8967 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8968 temp = gen_reg_rtx (mode);
8969 store_expr (TREE_OPERAND (exp, 1), temp,
8970 modifier == EXPAND_STACK_PARM ? 2 : 0);
8971 jumpif (TREE_OPERAND (exp, 0), op0);
8973 start_cleanup_deferral ();
8974 store_expr (TREE_OPERAND (exp, 2), temp,
8975 modifier == EXPAND_STACK_PARM ? 2 : 0);
8979 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8980 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8981 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8982 TREE_OPERAND (exp, 2), 0)
8983 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8984 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8985 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8987 if (GET_CODE (temp) == REG
8988 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8989 temp = gen_reg_rtx (mode);
8990 store_expr (TREE_OPERAND (exp, 2), temp,
8991 modifier == EXPAND_STACK_PARM ? 2 : 0);
8992 jumpifnot (TREE_OPERAND (exp, 0), op0);
8994 start_cleanup_deferral ();
8995 store_expr (TREE_OPERAND (exp, 1), temp,
8996 modifier == EXPAND_STACK_PARM ? 2 : 0);
9001 op1 = gen_label_rtx ();
9002 jumpifnot (TREE_OPERAND (exp, 0), op0);
9004 start_cleanup_deferral ();
/* One branch of the cond can be void, if it never returns.  For
example: A ? throw : E.  */
9009 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
9010 store_expr (TREE_OPERAND (exp, 1), temp,
9011 modifier == EXPAND_STACK_PARM ? 2 : 0);
9013 expand_expr (TREE_OPERAND (exp, 1),
9014 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
9015 end_cleanup_deferral ();
9017 emit_jump_insn (gen_jump (op1));
9020 start_cleanup_deferral ();
9022 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
9023 store_expr (TREE_OPERAND (exp, 2), temp,
9024 modifier == EXPAND_STACK_PARM ? 2 : 0);
9026 expand_expr (TREE_OPERAND (exp, 2),
9027 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
9030 end_cleanup_deferral ();
/* Something needs to be initialized, but we didn't know
where that thing was when building the tree.  For example,
it could be the return value of a function, or a parameter
to a function which is laid down in the stack, or a temporary
variable which must be passed by reference.
9047 We guarantee that the expression will either be constructed
9048 or copied into our original target. */
9050 tree slot = TREE_OPERAND (exp, 0);
9051 tree cleanups = NULL_TREE;
9054 if (TREE_CODE (slot) != VAR_DECL)
9058 target = original_target;
9060 /* Set this here so that if we get a target that refers to a
9061 register variable that's already been used, put_reg_into_stack
9062 knows that it should fix up those uses. */
9063 TREE_USED (slot) = 1;
9067 if (DECL_RTL_SET_P (slot))
9069 target = DECL_RTL (slot);
/* We have already expanded the slot, so don't do
it again.  */
if (TREE_OPERAND (exp, 1) == NULL_TREE)
return target;
9077 target = assign_temp (type, 2, 0, 1);
9078 /* All temp slots at this level must not conflict. */
9079 preserve_temp_slots (target);
9080 SET_DECL_RTL (slot, target);
9081 if (TREE_ADDRESSABLE (slot))
9082 put_var_into_stack (slot, /*rescan=*/false);
9084 /* Since SLOT is not known to the called function
9085 to belong to its stack frame, we must build an explicit
9086 cleanup. This case occurs when we must build up a reference
9087 to pass the reference as an argument. In this case,
it is very likely that such a reference need not be
built here.  */
9091 if (TREE_OPERAND (exp, 2) == 0)
9092 TREE_OPERAND (exp, 2)
9093 = (*lang_hooks.maybe_build_cleanup) (slot);
9094 cleanups = TREE_OPERAND (exp, 2);
/* This case does occur when expanding a parameter which
9100 needs to be constructed on the stack. The target
9101 is the actual stack address that we want to initialize.
9102 The function we call will perform the cleanup in this case. */
9104 /* If we have already assigned it space, use that space,
not the target that we were passed in, as our target
9106 parameter is only a hint. */
9107 if (DECL_RTL_SET_P (slot))
9109 target = DECL_RTL (slot);
/* We have already expanded the slot, so don't do
it again.  */
if (TREE_OPERAND (exp, 1) == NULL_TREE)
return target;
9117 SET_DECL_RTL (slot, target);
9118 /* If we must have an addressable slot, then make sure that
9119 the RTL that we just stored in slot is OK. */
9120 if (TREE_ADDRESSABLE (slot))
9121 put_var_into_stack (slot, /*rescan=*/true);
9125 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
9126 /* Mark it as expanded. */
9127 TREE_OPERAND (exp, 1) = NULL_TREE;
9129 store_expr (exp1, target, modifier == EXPAND_STACK_PARM ? 2 : 0);
9131 expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));
9138 tree lhs = TREE_OPERAND (exp, 0);
9139 tree rhs = TREE_OPERAND (exp, 1);
9141 temp = expand_assignment (lhs, rhs, ! ignore);
9147 /* If lhs is complex, expand calls in rhs before computing it.
9148 That's so we don't compute a pointer and save it over a
9149 call. If lhs is simple, compute it first so we can give it
9150 as a target if the rhs is just a call. This avoids an
9151 extra temp and copy and that prevents a partial-subsumption
9152 which makes bad code. Actually we could treat
9153 component_ref's of vars like vars. */
9155 tree lhs = TREE_OPERAND (exp, 0);
9156 tree rhs = TREE_OPERAND (exp, 1);
9160 /* Check for |= or &= of a bitfield of size one into another bitfield
9161 of size 1. In this case, (unless we need the result of the
9162 assignment) we can do this more efficiently with a
9163 test followed by an assignment, if necessary.
9165 ??? At this point, we can't get a BIT_FIELD_REF here. But if
things change so we do, this code should be enhanced to
support it.  */
9169 && TREE_CODE (lhs) == COMPONENT_REF
9170 && (TREE_CODE (rhs) == BIT_IOR_EXPR
9171 || TREE_CODE (rhs) == BIT_AND_EXPR)
9172 && TREE_OPERAND (rhs, 0) == lhs
9173 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
9174 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
9175 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
9177 rtx label = gen_label_rtx ();
9179 do_jump (TREE_OPERAND (rhs, 1),
9180 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
9181 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
9182 expand_assignment (lhs, convert (TREE_TYPE (rhs),
9183 (TREE_CODE (rhs) == BIT_IOR_EXPR
9185 : integer_zero_node)),
9187 do_pending_stack_adjust ();
9192 temp = expand_assignment (lhs, rhs, ! ignore);
9198 if (!TREE_OPERAND (exp, 0))
9199 expand_null_return ();
9201 expand_return (TREE_OPERAND (exp, 0));
9204 case PREINCREMENT_EXPR:
9205 case PREDECREMENT_EXPR:
9206 return expand_increment (exp, 0, ignore);
9208 case POSTINCREMENT_EXPR:
9209 case POSTDECREMENT_EXPR:
9210 /* Faster to treat as pre-increment if result is not used. */
9211 return expand_increment (exp, ! ignore, ignore);
9214 if (modifier == EXPAND_STACK_PARM)
9216 /* Are we taking the address of a nested function? */
9217 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
9218 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
9219 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
9220 && ! TREE_STATIC (exp))
9222 op0 = trampoline_address (TREE_OPERAND (exp, 0));
9223 op0 = force_operand (op0, target);
/* If we are taking the address of something erroneous, just
use zero.  */
9227 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
9229 /* If we are taking the address of a constant and are at the
9230 top level, we have to use output_constant_def since we can't
9231 call force_const_mem at top level. */
9233 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
9234 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
9236 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
9239 /* We make sure to pass const0_rtx down if we came in with
9240 ignore set, to avoid doing the cleanups twice for something. */
9241 op0 = expand_expr (TREE_OPERAND (exp, 0),
9242 ignore ? const0_rtx : NULL_RTX, VOIDmode,
9243 (modifier == EXPAND_INITIALIZER
9244 ? modifier : EXPAND_CONST_ADDRESS));
9246 /* If we are going to ignore the result, OP0 will have been set
9247 to const0_rtx, so just return it. Don't get confused and
9248 think we are taking the address of the constant. */
9252 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
9253 clever and returns a REG when given a MEM. */
9254 op0 = protect_from_queue (op0, 1);
9256 /* We would like the object in memory. If it is a constant, we can
9257 have it be statically allocated into memory. For a non-constant,
9258 we need to allocate some memory and store the value into it. */
9260 if (CONSTANT_P (op0))
9261 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
9263 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
9264 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
9265 || GET_CODE (op0) == PARALLEL || GET_CODE (op0) == LO_SUM)
9267 /* If the operand is a SAVE_EXPR, we can deal with this by
9268 forcing the SAVE_EXPR into memory. */
9269 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
9271 put_var_into_stack (TREE_OPERAND (exp, 0),
9273 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
9277 /* If this object is in a register, it can't be BLKmode. */
9278 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9279 rtx memloc = assign_temp (inner_type, 1, 1, 1);
9281 if (GET_CODE (op0) == PARALLEL)
9282 /* Handle calls that pass values in multiple
non-contiguous locations.  The Irix 6 ABI has examples of this.  */
9285 emit_group_store (memloc, op0, inner_type,
9286 int_size_in_bytes (inner_type));
9288 emit_move_insn (memloc, op0);
9294 if (GET_CODE (op0) != MEM)
9297 mark_temp_addr_taken (op0);
9298 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
9300 op0 = XEXP (op0, 0);
9301 #ifdef POINTERS_EXTEND_UNSIGNED
9302 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
9303 && mode == ptr_mode)
9304 op0 = convert_memory_address (ptr_mode, op0);
9309 /* If OP0 is not aligned as least as much as the type requires, we
9310 need to make a temporary, copy OP0 to it, and take the address of
9311 the temporary. We want to use the alignment of the type, not of
9312 the operand. Note that this is incorrect for FUNCTION_TYPE, but
9313 the test for BLKmode means that can't happen. The test for
BLKmode is because we never make mis-aligned MEMs with
non-BLKmode.
9317 We don't need to do this at all if the machine doesn't have
9318 strict alignment. */
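/* E.g. taking the address of a BLKmode field of a packed structure
on a strict-alignment machine: the field may sit at an unaligned
offset, so we copy it into a suitably aligned stack temporary and
return the temporary's address instead.  */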
9319 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
9320 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
9322 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
9324 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9327 if (TYPE_ALIGN_OK (inner_type))
9330 if (TREE_ADDRESSABLE (inner_type))
9332 /* We can't make a bitwise copy of this object, so fail. */
9333 error ("cannot take the address of an unaligned member");
9337 new = assign_stack_temp_for_type
9338 (TYPE_MODE (inner_type),
9339 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
9340 : int_size_in_bytes (inner_type),
9341 1, build_qualified_type (inner_type,
9342 (TYPE_QUALS (inner_type)
9343 | TYPE_QUAL_CONST)));
9345 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
9346 (modifier == EXPAND_STACK_PARM
9347 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
9352 op0 = force_operand (XEXP (op0, 0), target);
9356 && GET_CODE (op0) != REG
9357 && modifier != EXPAND_CONST_ADDRESS
9358 && modifier != EXPAND_INITIALIZER
9359 && modifier != EXPAND_SUM)
9360 op0 = force_reg (Pmode, op0);
9362 if (GET_CODE (op0) == REG
9363 && ! REG_USERVAR_P (op0))
9364 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
9366 #ifdef POINTERS_EXTEND_UNSIGNED
9367 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
9368 && mode == ptr_mode)
9369 op0 = convert_memory_address (ptr_mode, op0);
9374 case ENTRY_VALUE_EXPR:
9377 /* COMPLEX type for Extended Pascal & Fortran */
9380 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9383 /* Get the rtx code of the operands. */
9384 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9385 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
9388 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
9392 /* Move the real (op0) and imaginary (op1) parts to their location. */
9393 emit_move_insn (gen_realpart (mode, target), op0);
9394 emit_move_insn (gen_imagpart (mode, target), op1);
9396 insns = get_insns ();
9399 /* Complex construction should appear as a single unit. */
9400 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
9401 each with a separate pseudo as destination.
9402 It's not correct for flow to treat them as a unit. */
9403 if (GET_CODE (target) != CONCAT)
9404 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
9412 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9413 return gen_realpart (mode, op0);
9416 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9417 return gen_imagpart (mode, op0);
9421 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9425 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9428 target = gen_reg_rtx (mode);
9432 /* Store the realpart and the negated imagpart to target. */
9433 emit_move_insn (gen_realpart (partmode, target),
9434 gen_realpart (partmode, op0));
9436 imag_t = gen_imagpart (partmode, target);
9437 temp = expand_unop (partmode,
9438 ! unsignedp && flag_trapv
9439 && (GET_MODE_CLASS(partmode) == MODE_INT)
9440 ? negv_optab : neg_optab,
9441 gen_imagpart (partmode, op0), imag_t, 0);
9443 emit_move_insn (imag_t, temp);
9445 insns = get_insns ();
/* Conjugate should appear as a single unit.
9449 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
9450 each with a separate pseudo as destination.
9451 It's not correct for flow to treat them as a unit. */
9452 if (GET_CODE (target) != CONCAT)
9453 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
9460 case TRY_CATCH_EXPR:
9462 tree handler = TREE_OPERAND (exp, 1);
9464 expand_eh_region_start ();
9466 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9468 expand_eh_region_end_cleanup (handler);
9473 case TRY_FINALLY_EXPR:
9475 tree try_block = TREE_OPERAND (exp, 0);
9476 tree finally_block = TREE_OPERAND (exp, 1);
9478 if (!optimize || unsafe_for_reeval (finally_block) > 1)
9480 /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
9481 is not sufficient, so we cannot expand the block twice.
9482 So we play games with GOTO_SUBROUTINE_EXPR to let us
9483 expand the thing only once. */
9484 /* When not optimizing, we go ahead with this form since
9485 (1) user breakpoints operate more predictably without
9486 code duplication, and
9487 (2) we're not running any of the global optimizers
9488 that would explode in time/space with the highly
9489 connected CFG created by the indirect branching. */
9491 rtx finally_label = gen_label_rtx ();
9492 rtx done_label = gen_label_rtx ();
9493 rtx return_link = gen_reg_rtx (Pmode);
9494 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
9495 (tree) finally_label, (tree) return_link);
9496 TREE_SIDE_EFFECTS (cleanup) = 1;
9498 /* Start a new binding layer that will keep track of all cleanup
9499 actions to be performed. */
9500 expand_start_bindings (2);
9501 target_temp_slot_level = temp_slot_level;
9503 expand_decl_cleanup (NULL_TREE, cleanup);
9504 op0 = expand_expr (try_block, target, tmode, modifier);
9506 preserve_temp_slots (op0);
9507 expand_end_bindings (NULL_TREE, 0, 0);
9508 emit_jump (done_label);
9509 emit_label (finally_label);
9510 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
9511 emit_indirect_jump (return_link);
9512 emit_label (done_label);
9516 expand_start_bindings (2);
9517 target_temp_slot_level = temp_slot_level;
9519 expand_decl_cleanup (NULL_TREE, finally_block);
9520 op0 = expand_expr (try_block, target, tmode, modifier);
9522 preserve_temp_slots (op0);
9523 expand_end_bindings (NULL_TREE, 0, 0);
9529 case GOTO_SUBROUTINE_EXPR:
9531 rtx subr = (rtx) TREE_OPERAND (exp, 0);
9532 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
9533 rtx return_address = gen_label_rtx ();
9534 emit_move_insn (return_link,
9535 gen_rtx_LABEL_REF (Pmode, return_address));
9537 emit_label (return_address);
9542 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
9545 return get_exception_pointer (cfun);
/* Function descriptors are not valid except as
9549 initialization constants, and should not be expanded. */
9553 return (*lang_hooks.expand_expr) (exp, original_target, tmode, modifier);
9556 /* Here to do an ordinary binary operator, generating an instruction
9557 from the optab already placed in `this_optab'. */
9559 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
9561 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
9562 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9564 if (modifier == EXPAND_STACK_PARM)
9566 temp = expand_binop (mode, this_optab, op0, op1, target,
9567 unsignedp, OPTAB_LIB_WIDEN);
9573 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9574 when applied to the address of EXP produces an address known to be
9575 aligned more than BIGGEST_ALIGNMENT. */
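/* The pattern recognized below is, in C terms, roughly
(-(intptr_t) &EXP) & (ALIGN - 1), i.e. the distance needed to round
the address of EXP up to an ALIGN boundary, where ALIGN (a stand-in
name here) is a power of 2 larger than BIGGEST_ALIGNMENT.  */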
9578 is_aligning_offset (tree offset, tree exp)
9580 /* Strip off any conversions and WITH_RECORD_EXPR nodes. */
9581 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9582 || TREE_CODE (offset) == NOP_EXPR
9583 || TREE_CODE (offset) == CONVERT_EXPR
9584 || TREE_CODE (offset) == WITH_RECORD_EXPR)
9585 offset = TREE_OPERAND (offset, 0);
/* We must now have a BIT_AND_EXPR with a constant that is one less than
a power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
9589 if (TREE_CODE (offset) != BIT_AND_EXPR
9590 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9591 || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT) <= 0
|| exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9595 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9596 It must be NEGATE_EXPR. Then strip any more conversions. */
9597 offset = TREE_OPERAND (offset, 0);
9598 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9599 || TREE_CODE (offset) == NOP_EXPR
9600 || TREE_CODE (offset) == CONVERT_EXPR)
9601 offset = TREE_OPERAND (offset, 0);
9603 if (TREE_CODE (offset) != NEGATE_EXPR)
9606 offset = TREE_OPERAND (offset, 0);
9607 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9608 || TREE_CODE (offset) == NOP_EXPR
9609 || TREE_CODE (offset) == CONVERT_EXPR)
9610 offset = TREE_OPERAND (offset, 0);
9612 /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
9613 whose type is the same as EXP. */
9614 return (TREE_CODE (offset) == ADDR_EXPR
9615 && (TREE_OPERAND (offset, 0) == exp
9616 || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
9617 && (TREE_TYPE (TREE_OPERAND (offset, 0))
9618 == TREE_TYPE (exp)))));
/* Return the tree node if ARG corresponds to a string constant or zero
9622 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9623 in bytes within the string that ARG is accessing. The type of the
9624 offset will be `sizetype'. */
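/* For example, for the argument "hello" + 2 this returns the
STRING_CST for "hello" and sets *PTR_OFFSET to 2; callers such as
the strlen folder use this to look inside string literals.  */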
9627 string_constant (tree arg, tree *ptr_offset)
9631 if (TREE_CODE (arg) == ADDR_EXPR
9632 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9634 *ptr_offset = size_zero_node;
9635 return TREE_OPERAND (arg, 0);
9637 else if (TREE_CODE (arg) == PLUS_EXPR)
9639 tree arg0 = TREE_OPERAND (arg, 0);
9640 tree arg1 = TREE_OPERAND (arg, 1);
9645 if (TREE_CODE (arg0) == ADDR_EXPR
9646 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9648 *ptr_offset = convert (sizetype, arg1);
9649 return TREE_OPERAND (arg0, 0);
9651 else if (TREE_CODE (arg1) == ADDR_EXPR
9652 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9654 *ptr_offset = convert (sizetype, arg0);
9655 return TREE_OPERAND (arg1, 0);
9662 /* Expand code for a post- or pre- increment or decrement
9663 and return the RTX for the result.
9664 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
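/* E.g. for "y = x++" (POST is 1) the RTX returned holds the old
value of X and the increment itself is queued; for "++x" the new
value is returned.  */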
9667 expand_increment (tree exp, int post, int ignore)
9671 tree incremented = TREE_OPERAND (exp, 0);
9672 optab this_optab = add_optab;
9674 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9675 int op0_is_copy = 0;
9676 int single_insn = 0;
9677 /* 1 means we can't store into OP0 directly,
9678 because it is a subreg narrower than a word,
9679 and we don't dare clobber the rest of the word. */
9682 /* Stabilize any component ref that might need to be
9683 evaluated more than once below. */
9685 || TREE_CODE (incremented) == BIT_FIELD_REF
9686 || (TREE_CODE (incremented) == COMPONENT_REF
9687 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9688 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9689 incremented = stabilize_reference (incremented);
9690 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9691 ones into save exprs so that they don't accidentally get evaluated
9692 more than once by the code below. */
9693 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9694 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9695 incremented = save_expr (incremented);
9697 /* Compute the operands as RTX.
9698 Note whether OP0 is the actual lvalue or a copy of it:
9699 I believe it is a copy iff it is a register or subreg
9700 and insns were generated in computing it. */
9702 temp = get_last_insn ();
9703 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9705 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9706 in place but instead must do sign- or zero-extension during assignment,
9707 so we copy it into a new register and let the code below use it as
Note that we can safely modify this SUBREG since it is known not to be
9711 shared (it was made by the expand_expr call above). */
9713 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9716 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9720 else if (GET_CODE (op0) == SUBREG
9721 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9723 /* We cannot increment this SUBREG in place. If we are
9724 post-incrementing, get a copy of the old value. Otherwise,
9725 just mark that we cannot increment in place. */
9727 op0 = copy_to_reg (op0);
9732 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9733 && temp != get_last_insn ());
9734 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9736 /* Decide whether incrementing or decrementing. */
9737 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9738 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9739 this_optab = sub_optab;
9741 /* Convert decrement by a constant into a negative increment. */
9742 if (this_optab == sub_optab
9743 && GET_CODE (op1) == CONST_INT)
9745 op1 = GEN_INT (-INTVAL (op1));
9746 this_optab = add_optab;
9749 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9750 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9752 /* For a preincrement, see if we can do this with a single instruction. */
9755 icode = (int) this_optab->handlers[(int) mode].insn_code;
9756 if (icode != (int) CODE_FOR_nothing
9757 /* Make sure that OP0 is valid for operands 0 and 1
9758 of the insn we want to queue. */
9759 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9760 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9761 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9765 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9766 then we cannot just increment OP0. We must therefore contrive to
9767 increment the original value. Then, for postincrement, we can return
9768 OP0 since it is a copy of the old value. For preincrement, expand here
9769 unless we can do it with a single insn.
9771 Likewise if storing directly into OP0 would clobber high bits
9772 we need to preserve (bad_subreg). */
9773 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9775 /* This is the easiest way to increment the value wherever it is.
9776 Problems with multiple evaluation of INCREMENTED are prevented
9777 because either (1) it is a component_ref or preincrement,
9778 in which case it was stabilized above, or (2) it is an array_ref
9779 with constant index in an array in a register, which is
9780 safe to reevaluate. */
9781 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9782 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9783 ? MINUS_EXPR : PLUS_EXPR),
9786 TREE_OPERAND (exp, 1));
9788 while (TREE_CODE (incremented) == NOP_EXPR
9789 || TREE_CODE (incremented) == CONVERT_EXPR)
9791 newexp = convert (TREE_TYPE (incremented), newexp);
9792 incremented = TREE_OPERAND (incremented, 0);
9795 temp = expand_assignment (incremented, newexp, ! post && ! ignore);
9796 return post ? op0 : temp;
9801 /* We have a true reference to the value in OP0.
9802 If there is an insn to add or subtract in this mode, queue it.
9803 Queueing the increment insn avoids the register shuffling
9804 that often results if we must increment now and first save
9805 the old value for subsequent use. */
9807 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9808 op0 = stabilize (op0);
9811 icode = (int) this_optab->handlers[(int) mode].insn_code;
9812 if (icode != (int) CODE_FOR_nothing
9813 /* Make sure that OP0 is valid for operands 0 and 1
9814 of the insn we want to queue. */
9815 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9816 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9818 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9819 op1 = force_reg (mode, op1);
9821 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9823 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9825 rtx addr = (general_operand (XEXP (op0, 0), mode)
9826 ? force_reg (Pmode, XEXP (op0, 0))
9827 : copy_to_reg (XEXP (op0, 0)));
9830 op0 = replace_equiv_address (op0, addr);
9831 temp = force_reg (GET_MODE (op0), op0);
9832 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9833 op1 = force_reg (mode, op1);
9835 /* The increment queue is LIFO, thus we have to `queue'
9836 the instructions in reverse order. */
9837 enqueue_insn (op0, gen_move_insn (op0, temp));
9838 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9843 /* Preincrement, or we can't increment with one simple insn. */
9845 /* Save a copy of the value before inc or dec, to return it later. */
9846 temp = value = copy_to_reg (op0);
9848 /* Arrange to return the incremented value. */
9849 /* Copy the rtx because expand_binop will protect from the queue,
9850 and the results of that would be invalid for us to return
9851 if our caller does emit_queue before using our result. */
9852 temp = copy_rtx (value = op0);
9854 /* Increment however we can. */
9855 op1 = expand_binop (mode, this_optab, value, op1, op0,
9856 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9858 /* Make sure the value is stored into OP0. */
9860 emit_move_insn (op0, op1);
9865 /* Generate code to calculate EXP using a store-flag instruction
9866 and return an rtx for the result. EXP is either a comparison
9867 or a TRUTH_NOT_EXPR whose operand is a comparison.
9869 If TARGET is nonzero, store the result there if convenient.
If ONLY_CHEAP is nonzero, only do this if it is likely to be very
cheap.
9874 Return zero if there is no suitable set-flag instruction
9875 available on this machine.
9877 Once expand_expr has been called on the arguments of the comparison,
9878 we are committed to doing the store flag, since it is not safe to
9879 re-evaluate the expression. We emit the store-flag insn by calling
9880 emit_store_flag, but only expand the arguments if we have a reason
9881 to believe that emit_store_flag will be successful. If we think that
9882 it will, but it isn't, we have to simulate the store-flag with a
9883 set/jump/set sequence. */
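/* The set/jump/set fallback emitted at the bottom is, schematically:

target = 1;
if (cond) goto label;
target = 0;
label:

with the two constants swapped when we must invert the result.  */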
9886 do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
9889 tree arg0, arg1, type;
9891 enum machine_mode operand_mode;
9895 enum insn_code icode;
9896 rtx subtarget = target;
9899 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9900 result at the end. We can't simply invert the test since it would
9901 have already been inverted if it were valid. This case occurs for
9902 some floating-point comparisons. */
9904 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9905 invert = 1, exp = TREE_OPERAND (exp, 0);
9907 arg0 = TREE_OPERAND (exp, 0);
9908 arg1 = TREE_OPERAND (exp, 1);
9910 /* Don't crash if the comparison was erroneous. */
9911 if (arg0 == error_mark_node || arg1 == error_mark_node)
9914 type = TREE_TYPE (arg0);
9915 operand_mode = TYPE_MODE (type);
9916 unsignedp = TREE_UNSIGNED (type);
9918 /* We won't bother with BLKmode store-flag operations because it would mean
9919 passing a lot of information to emit_store_flag. */
9920 if (operand_mode == BLKmode)
9923 /* We won't bother with store-flag operations involving function pointers
9924 when function pointers must be canonicalized before comparisons. */
9925 #ifdef HAVE_canonicalize_funcptr_for_compare
9926 if (HAVE_canonicalize_funcptr_for_compare
9927 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9928 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9930 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9931 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9932 == FUNCTION_TYPE))))
9939 /* Get the rtx comparison code to use. We know that EXP is a comparison
9940 operation of some type. Some comparisons against 1 and -1 can be
9941 converted to comparisons with zero. Do so here so that the tests
9942 below will be aware that we have a comparison with zero. These
9943 tests will not catch constants in the first operand, but constants
9944 are rarely passed as the first operand. */
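/* E.g. "x < 1" is rewritten as "x <= 0", and unsigned "x >= 1" as
"x > 0"; comparisons against zero often have cheaper or more direct
set-flag forms.  */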
9946 switch (TREE_CODE (exp))
9955 if (integer_onep (arg1))
9956 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9958 code = unsignedp ? LTU : LT;
9961 if (! unsignedp && integer_all_onesp (arg1))
9962 arg1 = integer_zero_node, code = LT;
9964 code = unsignedp ? LEU : LE;
9967 if (! unsignedp && integer_all_onesp (arg1))
9968 arg1 = integer_zero_node, code = GE;
9970 code = unsignedp ? GTU : GT;
9973 if (integer_onep (arg1))
9974 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9976 code = unsignedp ? GEU : GE;
9979 case UNORDERED_EXPR:
10005 /* Put a constant second. */
10006 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10008 tem = arg0; arg0 = arg1; arg1 = tem;
10009 code = swap_condition (code);
10012 /* If this is an equality or inequality test of a single bit, we can
10013 do this by shifting the bit being tested to the low-order bit and
10014 masking the result with the constant 1. If the condition was EQ,
10015 we xor it with 1. This does not require an scc insn and is faster
10016 than an scc insn even if we have it.
10018 The code to make this transformation was moved into fold_single_bit_test,
10019 so we just call into the folder and expand its result. */
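/* E.g. "(x & 8) != 0" becomes "(x >> 3) & 1", and "(x & 8) == 0"
additionally XORs that result with 1; see fold_single_bit_test.  */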
10021 if ((code == NE || code == EQ)
10022 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10023 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10025 tree type = (*lang_hooks.types.type_for_mode) (mode, unsignedp);
10026 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
10028 target, VOIDmode, EXPAND_NORMAL);
10031 /* Now see if we are likely to be able to do this. Return if not. */
10032 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10035 icode = setcc_gen_code[(int) code];
10036 if (icode == CODE_FOR_nothing
10037 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10039 /* We can only do this if it is one of the special cases that
10040 can be handled without an scc insn. */
10041 if ((code == LT && integer_zerop (arg1))
10042 || (! only_cheap && code == GE && integer_zerop (arg1)))
10044 else if (BRANCH_COST >= 0
10045 && ! only_cheap && (code == NE || code == EQ)
10046 && TREE_CODE (type) != REAL_TYPE
10047 && ((abs_optab->handlers[(int) operand_mode].insn_code
10048 != CODE_FOR_nothing)
10049 || (ffs_optab->handlers[(int) operand_mode].insn_code
10050 != CODE_FOR_nothing)))
  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1, 1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);
  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if emit_store_flag does anything, it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
			    queued_subexp_p (op0) ? copy_rtx (op0) : op0,
			    queued_subexp_p (op1) ? copy_rtx (op1) : op1,
			    operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
	result = expand_binop (mode, xor_optab, result, const1_rtx,
			       result, 0, OPTAB_LIB_WIDEN);
      return result;
    }
  /* If this failed, we have to do this with set/compare/jump/set code.  */
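  /* The emitted sequence has the shape (assuming INVERT is clear):

	 target = 1;
	 if (op0 <comparison> op1) goto label;
	 target = 0;
      label:

     TARGET therefore must be a register mentioned in neither operand,
     or the initial store would clobber an input to the comparison.  */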
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
			     operand_mode, NULL_RTX);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
	     || (result != const0_rtx && invert))
	    ? const0_rtx : const1_rtx);

  /* The code of RESULT may not match CODE if compare_from_rtx
     decided to swap its operands and reverse the original code.

     We know that compare_from_rtx returns either a CONST_INT or
     a new comparison code, so it is safe to just extract the
     code from RESULT.  */
  code = GET_CODE (result);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif
/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */
#ifndef CASE_VALUES_THRESHOLD
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#endif /* CASE_VALUES_THRESHOLD */
unsigned int
case_values_threshold (void)
{
  return CASE_VALUES_THRESHOLD;
}
/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */
int
try_casesi (tree index_type, tree index_expr, tree minval, tree range,
	    rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;
  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build (MINUS_EXPR, index_type,
			  index_expr, minval);
      minval = integer_zero_node;
      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
			       omode, 1, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
	{
	  index_expr = convert ((*lang_hooks.types.type_for_size)
				(index_bits, 0), index_expr);
	  index_type = TREE_TYPE (index_expr);
	}

      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
    }
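  /* In the wide case above, the bounds check happens before the
     truncation: e.g. with a DImode index, INDEX - MINVAL is compared
     against RANGE while still in DImode, and out-of-range values have
     already jumped to DEFAULT_LABEL, so the low 32 bits that survive
     the conversion to SImode are the whole value.  */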
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();
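  /* Below, each casesi operand is presented in the mode its predicate
     expects: convert_modes handles the mode change for minval and
     range, and any operand the predicate still rejects is copied into
     a fresh register.  */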
  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);
  op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
		       op1, TREE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);
  op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
		       op2, TREE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
			      table_label, default_label));
  return 1;
}
/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif
/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */
static void
do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
	      rtx default_label)
{
  rtx temp, vector;

  if (INTVAL (range) > cfun->max_jumptable_ents)
    cfun->max_jumptable_ents = INTVAL (range);
  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */
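  /* For example, for a table covering case values 3..10, INDEX arrives
     here as x - 3 and RANGE is 7; the single unsigned test
     (unsigned) (x - 3) > 7 rejects both x < 3 (which wraps around to a
     huge unsigned value) and x > 10.  */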
  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
			   default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);
  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif
  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
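  /* The address computed below is table_label + index * entry-size:
     e.g. with 4-byte table entries, entry N of the table lives at
     table_label + 4 * N.  */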
  index = gen_rtx_PLUS (Pmode,
			gen_rtx_MULT (Pmode, index,
				      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
			gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  MEM_NOTRAP_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
int
try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
	       rtx table_label, rtx default_label)
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold (build (MINUS_EXPR, index_type,
			    convert (index_type, index_expr),
			    convert (index_type, minval)));
  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
		convert_modes (TYPE_MODE (index_type),
			       TYPE_MODE (TREE_TYPE (range)),
			       expand_expr (range, NULL_RTX,
					    VOIDmode, 0),
			       TREE_UNSIGNED (TREE_TYPE (range))),
		table_label, default_label);
  return 1;
}
/* Nonzero if the mode is a valid vector mode for this architecture.
   This returns nonzero even if there is no hardware support for the
   vector mode, as long as we can emulate it with narrower modes.  */
int
vector_mode_valid_p (enum machine_mode mode)
{
  enum mode_class class = GET_MODE_CLASS (mode);
  enum machine_mode innermode;
  /* Doh!  What's going on?  */
  if (class != MODE_VECTOR_INT
      && class != MODE_VECTOR_FLOAT)
    return 0;

  /* Hardware support.  Woo hoo!  */
  if (VECTOR_MODE_SUPPORTED_P (mode))
    return 1;
  innermode = GET_MODE_INNER (mode);

  /* We should probably return 1 if requesting V4DI and we have no DI,
     but we have V2DI; however, that case is probably very unlikely.  */

  /* If we have support for the inner mode, we can safely emulate it.
     We may not have V2DI, but we can emulate with a pair of DIs.  */
  return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
}
/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */
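/* For example, a V4SI VECTOR_CST whose element list holds 1 and 2
   yields the CONST_VECTOR [1, 2, 0, 0]: listed elements are converted
   in order and the remaining tail is zero-filled.  */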
static rtx
const_vector_from_tree (tree exp)
{
  rtvec v;
  int units, i;
  tree link, elt;
  enum machine_mode inner, mode;
  mode = TYPE_MODE (TREE_TYPE (exp));

  if (is_zeros_p (exp))
    return CONST0_RTX (mode);

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  v = rtvec_alloc (units);

  link = TREE_VECTOR_CST_ELTS (exp);
  for (i = 0; link; link = TREE_CHAIN (link), ++i)
    {
      elt = TREE_VALUE (link);

      if (TREE_CODE (elt) == REAL_CST)
	RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
							 inner);
      else
	RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
					       TREE_INT_CST_HIGH (elt),
					       inner);
    }
  /* Initialize remaining elements to 0.  */
  for (; i < units; ++i)
    RTVEC_ELT (v, i) = CONST0_RTX (inner);

  return gen_rtx_raw_CONST_VECTOR (mode, v);
}

#include "gt-expr.h"