1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
24 #include "coretypes.h"
32 #include "hard-reg-set.h"
35 #include "insn-config.h"
36 #include "insn-attr.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
44 #include "typeclass.h"
47 #include "langhooks.h"
51 /* Decide whether a function's arguments should be processed
52 from first to last or from last to first.
54 They should if the stack and args grow in opposite directions, but
55 only if we have push insns. */
59 #ifndef PUSH_ARGS_REVERSED
60 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
61 #define PUSH_ARGS_REVERSED /* If it's last to first. */
67 #ifndef STACK_PUSH_CODE
68 #ifdef STACK_GROWS_DOWNWARD
69 #define STACK_PUSH_CODE PRE_DEC
71 #define STACK_PUSH_CODE PRE_INC
75 /* Assume that case vectors are not pc-relative. */
76 #ifndef CASE_VECTOR_PC_RELATIVE
77 #define CASE_VECTOR_PC_RELATIVE 0
80 /* Convert defined/undefined to boolean. */
81 #ifdef TARGET_MEM_FUNCTIONS
82 #undef TARGET_MEM_FUNCTIONS
83 #define TARGET_MEM_FUNCTIONS 1
85 #define TARGET_MEM_FUNCTIONS 0
89 /* If this is nonzero, we do not bother generating VOLATILE
90 around volatile memory references, and we are willing to
91 output indirect addresses. If cse is to follow, we reject
92 indirect addresses so a useful potential cse is generated;
93 if it is used only once, instruction combination will produce
94 the same indirect address eventually. */
97 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
98 tree placeholder_list = 0;
100 /* This structure is used by move_by_pieces to describe the move to
102 struct move_by_pieces
111 int explicit_inc_from;
112 unsigned HOST_WIDE_INT len;
113 HOST_WIDE_INT offset;
117 /* This structure is used by store_by_pieces to describe the clear to
120 struct store_by_pieces
126 unsigned HOST_WIDE_INT len;
127 HOST_WIDE_INT offset;
128 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
133 static rtx enqueue_insn PARAMS ((rtx, rtx));
134 static unsigned HOST_WIDE_INT move_by_pieces_ninsns
135 PARAMS ((unsigned HOST_WIDE_INT,
137 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
138 struct move_by_pieces *));
139 static bool block_move_libcall_safe_for_call_parm PARAMS ((void));
140 static bool emit_block_move_via_movstr PARAMS ((rtx, rtx, rtx, unsigned));
141 static rtx emit_block_move_via_libcall PARAMS ((rtx, rtx, rtx));
142 static tree emit_block_move_libcall_fn PARAMS ((int));
143 static void emit_block_move_via_loop PARAMS ((rtx, rtx, rtx, unsigned));
144 static rtx clear_by_pieces_1 PARAMS ((PTR, HOST_WIDE_INT,
146 static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
148 static void store_by_pieces_1 PARAMS ((struct store_by_pieces *,
150 static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...),
152 struct store_by_pieces *));
153 static bool clear_storage_via_clrstr PARAMS ((rtx, rtx, unsigned));
154 static rtx clear_storage_via_libcall PARAMS ((rtx, rtx));
155 static tree clear_storage_libcall_fn PARAMS ((int));
156 static rtx compress_float_constant PARAMS ((rtx, rtx));
157 static rtx get_subtarget PARAMS ((rtx));
158 static int is_zeros_p PARAMS ((tree));
159 static int mostly_zeros_p PARAMS ((tree));
160 static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
161 HOST_WIDE_INT, enum machine_mode,
162 tree, tree, int, int));
163 static void store_constructor PARAMS ((tree, rtx, int, HOST_WIDE_INT));
164 static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
165 HOST_WIDE_INT, enum machine_mode,
166 tree, enum machine_mode, int, tree,
168 static rtx var_rtx PARAMS ((tree));
169 static HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
170 static HOST_WIDE_INT highest_pow2_factor_for_type PARAMS ((tree, tree));
171 static int is_aligning_offset PARAMS ((tree, tree));
172 static rtx expand_increment PARAMS ((tree, int, int));
173 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
175 static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
177 static void do_tablejump PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));
178 static rtx const_vector_from_tree PARAMS ((tree));
180 /* Record for each mode whether we can move a register directly to or
181 from an object of that mode in memory. If we can't, we won't try
182 to use that mode directly when accessing a field of that mode. */
184 static char direct_load[NUM_MACHINE_MODES];
185 static char direct_store[NUM_MACHINE_MODES];
187 /* Record for each mode whether we can float-extend from memory. */
189 static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
191 /* If a memory-to-memory move would take MOVE_RATIO or more simple
192 move-instruction sequences, we will do a movstr or libcall instead. */
195 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
198 /* If we are optimizing for space (-Os), cut down the default move ratio. */
199 #define MOVE_RATIO (optimize_size ? 3 : 15)
203 /* This macro is used to determine whether move_by_pieces should be called
204 to perform a structure copy. */
205 #ifndef MOVE_BY_PIECES_P
206 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
207 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
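/* Worked example (illustrative numbers, not from any particular target):
   with MOVE_RATIO of 15, a word-aligned 16-byte copy on a 32-bit target
   needs 16/4 = 4 SImode moves, so MOVE_BY_PIECES_P is true and the copy
   is expanded inline; a 256-byte copy would need 64 moves and is left to
   a movstr pattern or a libcall instead.  */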
210 /* If a clear memory operation would take CLEAR_RATIO or more simple
211 move-instruction sequences, we will do a clrstr or libcall instead. */
214 #if defined (HAVE_clrstrqi) || defined (HAVE_clrstrhi) || defined (HAVE_clrstrsi) || defined (HAVE_clrstrdi) || defined (HAVE_clrstrti)
215 #define CLEAR_RATIO 2
217 /* If we are optimizing for space, cut down the default clear ratio. */
218 #define CLEAR_RATIO (optimize_size ? 3 : 15)
222 /* This macro is used to determine whether clear_by_pieces should be
223 called to clear storage. */
224 #ifndef CLEAR_BY_PIECES_P
225 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
226 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
229 /* This macro is used to determine whether store_by_pieces should be
230 called to "memset" storage with byte values other than zero, or
231 to "memcpy" storage when the source is a constant string. */
232 #ifndef STORE_BY_PIECES_P
233 #define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
236 /* This array records the insn_code of insns to perform block moves. */
237 enum insn_code movstr_optab[NUM_MACHINE_MODES];
239 /* This array records the insn_code of insns to perform block clears. */
240 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
242 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
244 #ifndef SLOW_UNALIGNED_ACCESS
245 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
248 /* This is run once per compilation to set up which modes can be used
249 directly in memory and to initialize the block move optab. */
255 enum machine_mode mode;
260 /* Try indexing by frame ptr and try by stack ptr.
261 It is known that on the Convex the stack ptr isn't a valid index.
262 With luck, one or the other is valid on any machine. */
263 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
264 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
266 /* A scratch register we can modify in-place below to avoid
267 useless RTL allocations. */
268 reg = gen_rtx_REG (VOIDmode, -1);
270 insn = rtx_alloc (INSN);
271 pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
272 PATTERN (insn) = pat;
274 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
275 mode = (enum machine_mode) ((int) mode + 1))
279 direct_load[(int) mode] = direct_store[(int) mode] = 0;
280 PUT_MODE (mem, mode);
281 PUT_MODE (mem1, mode);
282 PUT_MODE (reg, mode);
284 /* See if there is some register that can be used in this mode and
285 directly loaded or stored from memory. */
287 if (mode != VOIDmode && mode != BLKmode)
288 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
289 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
292 if (! HARD_REGNO_MODE_OK (regno, mode))
298 SET_DEST (pat) = reg;
299 if (recog (pat, insn, &num_clobbers) >= 0)
300 direct_load[(int) mode] = 1;
302 SET_SRC (pat) = mem1;
303 SET_DEST (pat) = reg;
304 if (recog (pat, insn, &num_clobbers) >= 0)
305 direct_load[(int) mode] = 1;
308 SET_DEST (pat) = mem;
309 if (recog (pat, insn, &num_clobbers) >= 0)
310 direct_store[(int) mode] = 1;
313 SET_DEST (pat) = mem1;
314 if (recog (pat, insn, &num_clobbers) >= 0)
315 direct_store[(int) mode] = 1;
319 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
321 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
322 mode = GET_MODE_WIDER_MODE (mode))
324 enum machine_mode srcmode;
325 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
326 srcmode = GET_MODE_WIDER_MODE (srcmode))
330 ic = can_extend_p (mode, srcmode, 0);
331 if (ic == CODE_FOR_nothing)
334 PUT_MODE (mem, srcmode);
336 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
337 float_extend_from_mem[mode][srcmode] = true;
342 /* This is run at the start of compiling a function. */
347 cfun->expr = (struct expr_status *) ggc_alloc (sizeof (struct expr_status));
350 pending_stack_adjust = 0;
351 stack_pointer_delta = 0;
352 inhibit_defer_pop = 0;
354 apply_args_value = 0;
358 /* Small sanity check that the queue is empty at the end of a function. */
361 finish_expr_for_function ()
367 /* Manage the queue of increment instructions to be output
368 for POSTINCREMENT_EXPR expressions, etc. */
370 /* Queue up to increment (or change) VAR later. BODY says how:
371 BODY should be the same thing you would pass to emit_insn
372 to increment right away. It will go to emit_insn later on.
374 The value is a QUEUED expression to be used in place of VAR
375 where you want to guarantee the pre-incrementation value of VAR. */
378 enqueue_insn (var, body)
381 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
382 body, pending_chain);
383 return pending_chain;
386 /* Use protect_from_queue to convert a QUEUED expression
387 into something that you can put immediately into an instruction.
388 If the queued incrementation has not happened yet,
389 protect_from_queue returns the variable itself.
390 If the incrementation has happened, protect_from_queue returns a temp
391 that contains a copy of the old value of the variable.
393 Any time an rtx which might possibly be a QUEUED is to be put
394 into an instruction, it must be passed through protect_from_queue first.
395 QUEUED expressions are not meaningful in instructions.
397 Do not pass a value through protect_from_queue and then hold
398 on to it for a while before putting it in an instruction!
399 If the queue is flushed in between, incorrect code will result. */
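/* A minimal usage sketch (hypothetical caller, not code from this file),
   following the protocol above: protect each operand immediately before
   emitting the insn that uses it.  */
#if 0
  {
    rtx op = protect_from_queue (val, 0);	/* Read access.  */
    rtx dst = protect_from_queue (mem, 1);	/* Write access.  */
    emit_insn (gen_move_insn (dst, op));	/* No emit_queue call between.  */
  }
#endif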
402 protect_from_queue (x, modify)
406 RTX_CODE code = GET_CODE (x);
408 #if 0 /* A QUEUED can hang around after the queue is forced out. */
409 /* Shortcut for most common case. */
410 if (pending_chain == 0)
416 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
417 use of autoincrement. Make a copy of the contents of the memory
418 location rather than a copy of the address, but not if the value is
419 of mode BLKmode. Don't modify X in place since it might be
421 if (code == MEM && GET_MODE (x) != BLKmode
422 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
425 rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));
429 rtx temp = gen_reg_rtx (GET_MODE (x));
431 emit_insn_before (gen_move_insn (temp, new),
436 /* Copy the address into a pseudo, so that the returned value
437 remains correct across calls to emit_queue. */
438 return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
441 /* Otherwise, recursively protect the subexpressions of all
442 the kinds of rtx's that can contain a QUEUED. */
445 rtx tem = protect_from_queue (XEXP (x, 0), 0);
446 if (tem != XEXP (x, 0))
452 else if (code == PLUS || code == MULT)
454 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
455 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
456 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
465 /* If the increment has not happened, use the variable itself. Copy it
466 into a new pseudo so that the value remains correct across calls to
468 if (QUEUED_INSN (x) == 0)
469 return copy_to_reg (QUEUED_VAR (x));
470 /* If the increment has happened and a pre-increment copy exists,
472 if (QUEUED_COPY (x) != 0)
473 return QUEUED_COPY (x);
474 /* The increment has happened but we haven't set up a pre-increment copy.
475 Set one up now, and use it. */
476 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
477 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
479 return QUEUED_COPY (x);
482 /* Return nonzero if X contains a QUEUED expression:
483 if it contains anything that will be altered by a queued increment.
484 We handle only combinations of MEM, PLUS, MINUS and MULT operators
485 since memory addresses generally contain only those. */
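/* For example (illustrative), (mem:SI (plus:SI (queued ...) (const_int 4)))
   contains a QUEUED and so would be altered by a queued increment.  */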
491 enum rtx_code code = GET_CODE (x);
497 return queued_subexp_p (XEXP (x, 0));
501 return (queued_subexp_p (XEXP (x, 0))
502 || queued_subexp_p (XEXP (x, 1)));
508 /* Perform all the pending incrementations. */
514 while ((p = pending_chain))
516 rtx body = QUEUED_BODY (p);
518 switch (GET_CODE (body))
526 QUEUED_INSN (p) = body;
530 #ifdef ENABLE_CHECKING
537 QUEUED_INSN (p) = emit_insn (body);
541 pending_chain = QUEUED_NEXT (p);
545 /* Copy data from FROM to TO, where the machine modes are not the same.
546 Both modes may be integer, or both may be floating.
547 UNSIGNEDP should be nonzero if FROM is an unsigned type.
548 This causes zero-extension instead of sign-extension. */
551 convert_move (to, from, unsignedp)
555 enum machine_mode to_mode = GET_MODE (to);
556 enum machine_mode from_mode = GET_MODE (from);
557 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
558 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
562 /* rtx code for making an equivalent value. */
563 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
564 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
566 to = protect_from_queue (to, 1);
567 from = protect_from_queue (from, 0);
569 if (to_real != from_real)
572 /* If FROM is a SUBREG that indicates that we have already done at least
573 the required extension, strip it. We don't handle such SUBREGs as
576 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
577 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
578 >= GET_MODE_SIZE (to_mode))
579 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
580 from = gen_lowpart (to_mode, from), from_mode = to_mode;
582 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
585 if (to_mode == from_mode
586 || (from_mode == VOIDmode && CONSTANT_P (from)))
588 emit_move_insn (to, from);
592 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
594 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
597 if (VECTOR_MODE_P (to_mode))
598 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
600 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
602 emit_move_insn (to, from);
606 if (to_real != from_real)
613 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
615 /* Try converting directly if the insn is supported. */
616 if ((code = can_extend_p (to_mode, from_mode, 0))
619 emit_unop_insn (code, to, from, UNKNOWN);
624 #ifdef HAVE_trunchfqf2
625 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
627 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
631 #ifdef HAVE_trunctqfqf2
632 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
634 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
638 #ifdef HAVE_truncsfqf2
639 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
641 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
645 #ifdef HAVE_truncdfqf2
646 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
648 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
652 #ifdef HAVE_truncxfqf2
653 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
655 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
659 #ifdef HAVE_trunctfqf2
660 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
662 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
667 #ifdef HAVE_trunctqfhf2
668 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
670 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
674 #ifdef HAVE_truncsfhf2
675 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
677 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
681 #ifdef HAVE_truncdfhf2
682 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
684 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
688 #ifdef HAVE_truncxfhf2
689 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
691 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
695 #ifdef HAVE_trunctfhf2
696 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
698 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
703 #ifdef HAVE_truncsftqf2
704 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
706 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
710 #ifdef HAVE_truncdftqf2
711 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
713 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
717 #ifdef HAVE_truncxftqf2
718 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
720 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
724 #ifdef HAVE_trunctftqf2
725 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
727 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
732 #ifdef HAVE_truncdfsf2
733 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
735 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
739 #ifdef HAVE_truncxfsf2
740 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
742 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
746 #ifdef HAVE_trunctfsf2
747 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
749 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
753 #ifdef HAVE_truncxfdf2
754 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
756 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
760 #ifdef HAVE_trunctfdf2
761 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
763 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
775 libcall = extendsfdf2_libfunc;
779 libcall = extendsfxf2_libfunc;
783 libcall = extendsftf2_libfunc;
795 libcall = truncdfsf2_libfunc;
799 libcall = extenddfxf2_libfunc;
803 libcall = extenddftf2_libfunc;
815 libcall = truncxfsf2_libfunc;
819 libcall = truncxfdf2_libfunc;
831 libcall = trunctfsf2_libfunc;
835 libcall = trunctfdf2_libfunc;
847 if (libcall == (rtx) 0)
848 /* This conversion is not implemented yet. */
852 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
854 insns = get_insns ();
856 emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
861 /* Now both modes are integers. */
863 /* Handle expanding beyond a word. */
864 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
865 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
872 enum machine_mode lowpart_mode;
873 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
875 /* Try converting directly if the insn is supported. */
876 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
879 /* If FROM is a SUBREG, put it into a register. Do this
880 so that we always generate the same set of insns for
881 better cse'ing; if an intermediate assignment occurred,
882 we won't be doing the operation directly on the SUBREG. */
883 if (optimize > 0 && GET_CODE (from) == SUBREG)
884 from = force_reg (from_mode, from);
885 emit_unop_insn (code, to, from, equiv_code);
888 /* Next, try converting via full word. */
889 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
890 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
891 != CODE_FOR_nothing))
893 if (GET_CODE (to) == REG)
894 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
895 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
896 emit_unop_insn (code, to,
897 gen_lowpart (word_mode, to), equiv_code);
901 /* No special multiword conversion insn; do it by hand. */
904 /* Since we will turn this into a no conflict block, we must ensure
905 that the source does not overlap the target. */
907 if (reg_overlap_mentioned_p (to, from))
908 from = force_reg (from_mode, from);
910 /* Get a copy of FROM widened to a word, if necessary. */
911 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
912 lowpart_mode = word_mode;
914 lowpart_mode = from_mode;
916 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
918 lowpart = gen_lowpart (lowpart_mode, to);
919 emit_move_insn (lowpart, lowfrom);
921 /* Compute the value to put in each remaining word. */
923 fill_value = const0_rtx;
928 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
929 && STORE_FLAG_VALUE == -1)
931 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
933 fill_value = gen_reg_rtx (word_mode);
934 emit_insn (gen_slt (fill_value));
940 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
941 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
943 fill_value = convert_to_mode (word_mode, fill_value, 1);
947 /* Fill the remaining words. */
948 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
950 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
951 rtx subword = operand_subword (to, index, 1, to_mode);
956 if (fill_value != subword)
957 emit_move_insn (subword, fill_value);
960 insns = get_insns ();
963 emit_no_conflict_block (insns, to, from, NULL_RTX,
964 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
968 /* Truncating multi-word to a word or less. */
969 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
970 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
972 if (!((GET_CODE (from) == MEM
973 && ! MEM_VOLATILE_P (from)
974 && direct_load[(int) to_mode]
975 && ! mode_dependent_address_p (XEXP (from, 0)))
976 || GET_CODE (from) == REG
977 || GET_CODE (from) == SUBREG))
978 from = force_reg (from_mode, from);
979 convert_move (to, gen_lowpart (word_mode, from), 0);
983 /* Handle pointer conversion. */ /* SPEE 900220. */
984 if (to_mode == PQImode)
986 if (from_mode != QImode)
987 from = convert_to_mode (QImode, from, unsignedp);
989 #ifdef HAVE_truncqipqi2
990 if (HAVE_truncqipqi2)
992 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
995 #endif /* HAVE_truncqipqi2 */
999 if (from_mode == PQImode)
1001 if (to_mode != QImode)
1003 from = convert_to_mode (QImode, from, unsignedp);
1008 #ifdef HAVE_extendpqiqi2
1009 if (HAVE_extendpqiqi2)
1011 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
1014 #endif /* HAVE_extendpqiqi2 */
1019 if (to_mode == PSImode)
1021 if (from_mode != SImode)
1022 from = convert_to_mode (SImode, from, unsignedp);
1024 #ifdef HAVE_truncsipsi2
1025 if (HAVE_truncsipsi2)
1027 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
1030 #endif /* HAVE_truncsipsi2 */
1034 if (from_mode == PSImode)
1036 if (to_mode != SImode)
1038 from = convert_to_mode (SImode, from, unsignedp);
1043 #ifdef HAVE_extendpsisi2
1044 if (! unsignedp && HAVE_extendpsisi2)
1046 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
1049 #endif /* HAVE_extendpsisi2 */
1050 #ifdef HAVE_zero_extendpsisi2
1051 if (unsignedp && HAVE_zero_extendpsisi2)
1053 emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
1056 #endif /* HAVE_zero_extendpsisi2 */
1061 if (to_mode == PDImode)
1063 if (from_mode != DImode)
1064 from = convert_to_mode (DImode, from, unsignedp);
1066 #ifdef HAVE_truncdipdi2
1067 if (HAVE_truncdipdi2)
1069 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1072 #endif /* HAVE_truncdipdi2 */
1076 if (from_mode == PDImode)
1078 if (to_mode != DImode)
1080 from = convert_to_mode (DImode, from, unsignedp);
1085 #ifdef HAVE_extendpdidi2
1086 if (HAVE_extendpdidi2)
1088 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1091 #endif /* HAVE_extendpdidi2 */
1096 /* Now follow all the conversions between integers
1097 no more than a word long. */
1099 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1100 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1101 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1102 GET_MODE_BITSIZE (from_mode)))
1104 if (!((GET_CODE (from) == MEM
1105 && ! MEM_VOLATILE_P (from)
1106 && direct_load[(int) to_mode]
1107 && ! mode_dependent_address_p (XEXP (from, 0)))
1108 || GET_CODE (from) == REG
1109 || GET_CODE (from) == SUBREG))
1110 from = force_reg (from_mode, from);
1111 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1112 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1113 from = copy_to_reg (from);
1114 emit_move_insn (to, gen_lowpart (to_mode, from));
1118 /* Handle extension. */
1119 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1121 /* Convert directly if that works. */
1122 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1123 != CODE_FOR_nothing)
1126 from = force_not_mem (from);
1128 emit_unop_insn (code, to, from, equiv_code);
1133 enum machine_mode intermediate;
1137 /* Search for a mode to convert via. */
1138 for (intermediate = from_mode; intermediate != VOIDmode;
1139 intermediate = GET_MODE_WIDER_MODE (intermediate))
1140 if (((can_extend_p (to_mode, intermediate, unsignedp)
1141 != CODE_FOR_nothing)
1142 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1143 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1144 GET_MODE_BITSIZE (intermediate))))
1145 && (can_extend_p (intermediate, from_mode, unsignedp)
1146 != CODE_FOR_nothing))
1148 convert_move (to, convert_to_mode (intermediate, from,
1149 unsignedp), unsignedp);
1153 /* No suitable intermediate mode.
1154 Generate what we need with shifts. */
1155 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1156 - GET_MODE_BITSIZE (from_mode), 0);
1157 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1158 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1160 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1163 emit_move_insn (to, tmp);
1168 /* Support special truncate insns for certain modes. */
1170 if (from_mode == DImode && to_mode == SImode)
1172 #ifdef HAVE_truncdisi2
1173 if (HAVE_truncdisi2)
1175 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1179 convert_move (to, force_reg (from_mode, from), unsignedp);
1183 if (from_mode == DImode && to_mode == HImode)
1185 #ifdef HAVE_truncdihi2
1186 if (HAVE_truncdihi2)
1188 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1192 convert_move (to, force_reg (from_mode, from), unsignedp);
1196 if (from_mode == DImode && to_mode == QImode)
1198 #ifdef HAVE_truncdiqi2
1199 if (HAVE_truncdiqi2)
1201 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1205 convert_move (to, force_reg (from_mode, from), unsignedp);
1209 if (from_mode == SImode && to_mode == HImode)
1211 #ifdef HAVE_truncsihi2
1212 if (HAVE_truncsihi2)
1214 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1218 convert_move (to, force_reg (from_mode, from), unsignedp);
1222 if (from_mode == SImode && to_mode == QImode)
1224 #ifdef HAVE_truncsiqi2
1225 if (HAVE_truncsiqi2)
1227 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1231 convert_move (to, force_reg (from_mode, from), unsignedp);
1235 if (from_mode == HImode && to_mode == QImode)
1237 #ifdef HAVE_trunchiqi2
1238 if (HAVE_trunchiqi2)
1240 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1244 convert_move (to, force_reg (from_mode, from), unsignedp);
1248 if (from_mode == TImode && to_mode == DImode)
1250 #ifdef HAVE_trunctidi2
1251 if (HAVE_trunctidi2)
1253 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1257 convert_move (to, force_reg (from_mode, from), unsignedp);
1261 if (from_mode == TImode && to_mode == SImode)
1263 #ifdef HAVE_trunctisi2
1264 if (HAVE_trunctisi2)
1266 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1270 convert_move (to, force_reg (from_mode, from), unsignedp);
1274 if (from_mode == TImode && to_mode == HImode)
1276 #ifdef HAVE_trunctihi2
1277 if (HAVE_trunctihi2)
1279 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1283 convert_move (to, force_reg (from_mode, from), unsignedp);
1287 if (from_mode == TImode && to_mode == QImode)
1289 #ifdef HAVE_trunctiqi2
1290 if (HAVE_trunctiqi2)
1292 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1296 convert_move (to, force_reg (from_mode, from), unsignedp);
1300 /* Handle truncation of volatile memrefs, and so on;
1301 the things that couldn't be truncated directly,
1302 and for which there was no special instruction. */
1303 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1305 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1306 emit_move_insn (to, temp);
1310 /* Mode combination is not recognized. */
1314 /* Return an rtx for a value that would result
1315 from converting X to mode MODE.
1316 Both X and MODE may be floating, or both integer.
1317 UNSIGNEDP is nonzero if X is an unsigned value.
1318 This can be done by referring to a part of X in place
1319 or by copying to a new temporary with conversion.
1321 This function *must not* call protect_from_queue
1322 except when putting X into an insn (in which case convert_move does it). */
1325 convert_to_mode (mode, x, unsignedp)
1326 enum machine_mode mode;
1330 return convert_modes (mode, VOIDmode, x, unsignedp);
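/* An illustrative call (hypothetical operands): widen an SImode pseudo to
   DImode, zero-extending because the source is treated as unsigned.  */
#if 0
  {
    rtx narrow = gen_reg_rtx (SImode);
    rtx wide = convert_to_mode (DImode, narrow, 1);	/* unsignedp == 1 */
  }
#endif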
1333 /* Return an rtx for a value that would result
1334 from converting X from mode OLDMODE to mode MODE.
1335 Both modes may be floating, or both integer.
1336 UNSIGNEDP is nonzero if X is an unsigned value.
1338 This can be done by referring to a part of X in place
1339 or by copying to a new temporary with conversion.
1341 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1343 This function *must not* call protect_from_queue
1344 except when putting X into an insn (in which case convert_move does it). */
1347 convert_modes (mode, oldmode, x, unsignedp)
1348 enum machine_mode mode, oldmode;
1354 /* If FROM is a SUBREG that indicates that we have already done at least
1355 the required extension, strip it. */
1357 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1358 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1359 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1360 x = gen_lowpart (mode, x);
1362 if (GET_MODE (x) != VOIDmode)
1363 oldmode = GET_MODE (x);
1365 if (mode == oldmode)
1368 /* There is one case that we must handle specially: If we are converting
1369 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1370 we are to interpret the constant as unsigned, gen_lowpart will do
1371 the wrong if the constant appears negative. What we want to do is
1372 make the high-order word of the constant zero, not all ones. */
1374 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1375 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1376 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1378 HOST_WIDE_INT val = INTVAL (x);
1380 if (oldmode != VOIDmode
1381 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1383 int width = GET_MODE_BITSIZE (oldmode);
1385 /* We need to zero extend VAL. */
1386 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1389 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
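/* Worked example (illustrative): with a 32-bit HOST_WIDE_INT, converting
   (const_int -1) whose OLDMODE is 16 bits wide to an unsigned 64-bit mode
   masks VAL down to 0xffff and pairs it with a zero high word, instead of
   letting gen_lowpart produce all ones.  */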
1392 /* We can do this with a gen_lowpart if both desired and current modes
1393 are integer, and this is either a constant integer, a register, or a
1394 non-volatile MEM. Except for the constant case where MODE is no
1395 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1397 if ((GET_CODE (x) == CONST_INT
1398 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1399 || (GET_MODE_CLASS (mode) == MODE_INT
1400 && GET_MODE_CLASS (oldmode) == MODE_INT
1401 && (GET_CODE (x) == CONST_DOUBLE
1402 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1403 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1404 && direct_load[(int) mode])
1405 || (GET_CODE (x) == REG
1406 && (! HARD_REGISTER_P (x)
1407 || HARD_REGNO_MODE_OK (REGNO (x), mode))
1408 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1409 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1411 /* ??? If we don't know OLDMODE, we have to assume here that
1412 X does not need sign- or zero-extension. This may not be
1413 the case, but it's the best we can do. */
1414 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1415 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1417 HOST_WIDE_INT val = INTVAL (x);
1418 int width = GET_MODE_BITSIZE (oldmode);
1420 /* We must sign or zero-extend in this case. Start by
1421 zero-extending, then sign extend if we need to. */
1422 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1424 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1425 val |= (HOST_WIDE_INT) (-1) << width;
1427 return gen_int_mode (val, mode);
1430 return gen_lowpart (mode, x);
1433 temp = gen_reg_rtx (mode);
1434 convert_move (temp, x, unsignedp);
1438 /* This macro determines the largest unit size that
1439 move_by_pieces can use.  */
1441 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1442 move efficiently, as opposed to MOVE_MAX which is the maximum
1443 number of bytes we can move with a single instruction. */
1445 #ifndef MOVE_MAX_PIECES
1446 #define MOVE_MAX_PIECES MOVE_MAX
1449 /* STORE_MAX_PIECES is the number of bytes at a time that we can
1450 store efficiently. Due to internal GCC limitations, this is
1451 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
1452 for an immediate constant. */
1454 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
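/* For example (illustrative): with an 8-byte HOST_WIDE_INT, an immediate
   constant of up to 16 bytes can be represented, so STORE_MAX_PIECES is
   MOVE_MAX_PIECES capped at 16.  */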
1456 /* Generate several move instructions to copy LEN bytes from block FROM to
1457 block TO. (These are MEM rtx's with BLKmode). The caller must pass FROM
1458 and TO through protect_from_queue before calling.
1460 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1461 used to push FROM to the stack.
1463 ALIGN is maximum alignment we can assume. */
1466 move_by_pieces (to, from, len, align)
1468 unsigned HOST_WIDE_INT len;
1471 struct move_by_pieces data;
1472 rtx to_addr, from_addr = XEXP (from, 0);
1473 unsigned int max_size = MOVE_MAX_PIECES + 1;
1474 enum machine_mode mode = VOIDmode, tmode;
1475 enum insn_code icode;
1478 data.from_addr = from_addr;
1481 to_addr = XEXP (to, 0);
1484 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1485 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1487 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1494 #ifdef STACK_GROWS_DOWNWARD
1500 data.to_addr = to_addr;
1503 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1504 || GET_CODE (from_addr) == POST_INC
1505 || GET_CODE (from_addr) == POST_DEC);
1507 data.explicit_inc_from = 0;
1508 data.explicit_inc_to = 0;
1509 if (data.reverse) data.offset = len;
1512 /* If copying requires more than two move insns,
1513 copy addresses to registers (to make displacements shorter)
1514 and use post-increment if available. */
1515 if (!(data.autinc_from && data.autinc_to)
1516 && move_by_pieces_ninsns (len, align) > 2)
1518 /* Find the mode of the largest move... */
1519 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1520 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1521 if (GET_MODE_SIZE (tmode) < max_size)
1524 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1526 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1527 data.autinc_from = 1;
1528 data.explicit_inc_from = -1;
1530 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1532 data.from_addr = copy_addr_to_reg (from_addr);
1533 data.autinc_from = 1;
1534 data.explicit_inc_from = 1;
1536 if (!data.autinc_from && CONSTANT_P (from_addr))
1537 data.from_addr = copy_addr_to_reg (from_addr);
1538 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1540 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1542 data.explicit_inc_to = -1;
1544 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1546 data.to_addr = copy_addr_to_reg (to_addr);
1548 data.explicit_inc_to = 1;
1550 if (!data.autinc_to && CONSTANT_P (to_addr))
1551 data.to_addr = copy_addr_to_reg (to_addr);
1554 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1555 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1556 align = MOVE_MAX * BITS_PER_UNIT;
1558 /* First move what we can in the largest integer mode, then go to
1559 successively smaller modes. */
1561 while (max_size > 1)
1563 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1564 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1565 if (GET_MODE_SIZE (tmode) < max_size)
1568 if (mode == VOIDmode)
1571 icode = mov_optab->handlers[(int) mode].insn_code;
1572 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1573 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1575 max_size = GET_MODE_SIZE (mode);
1578 /* The code above should have handled everything. */
1583 /* Return number of insns required to move L bytes by pieces.
1584 ALIGN (in bits) is maximum alignment we can assume. */
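/* Worked example (illustrative): for L == 11 with word-aligned operands on
   a 32-bit target, the greedy walk below counts 2 SImode moves, 1 HImode
   move and 1 QImode move, so the result is 4.  */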
1586 static unsigned HOST_WIDE_INT
1587 move_by_pieces_ninsns (l, align)
1588 unsigned HOST_WIDE_INT l;
1591 unsigned HOST_WIDE_INT n_insns = 0;
1592 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1594 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1595 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1596 align = MOVE_MAX * BITS_PER_UNIT;
1598 while (max_size > 1)
1600 enum machine_mode mode = VOIDmode, tmode;
1601 enum insn_code icode;
1603 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1604 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1605 if (GET_MODE_SIZE (tmode) < max_size)
1608 if (mode == VOIDmode)
1611 icode = mov_optab->handlers[(int) mode].insn_code;
1612 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1613 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1615 max_size = GET_MODE_SIZE (mode);
1623 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1624 with move instructions for mode MODE. GENFUN is the gen_... function
1625 to make a move insn for that mode. DATA has all the other info. */
1628 move_by_pieces_1 (genfun, mode, data)
1629 rtx (*genfun) PARAMS ((rtx, ...));
1630 enum machine_mode mode;
1631 struct move_by_pieces *data;
1633 unsigned int size = GET_MODE_SIZE (mode);
1634 rtx to1 = NULL_RTX, from1;
1636 while (data->len >= size)
1639 data->offset -= size;
1643 if (data->autinc_to)
1644 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1647 to1 = adjust_address (data->to, mode, data->offset);
1650 if (data->autinc_from)
1651 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1654 from1 = adjust_address (data->from, mode, data->offset);
1656 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1657 emit_insn (gen_add2_insn (data->to_addr,
1658 GEN_INT (-(HOST_WIDE_INT)size)));
1659 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1660 emit_insn (gen_add2_insn (data->from_addr,
1661 GEN_INT (-(HOST_WIDE_INT)size)));
1664 emit_insn ((*genfun) (to1, from1));
1667 #ifdef PUSH_ROUNDING
1668 emit_single_push_insn (mode, from1, NULL);
1674 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1675 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1676 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1677 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1679 if (! data->reverse)
1680 data->offset += size;
1686 /* Emit code to move a block Y to a block X. This may be done with
1687 string-move instructions, with multiple scalar move instructions,
1688 or with a library call.
1690 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1691 SIZE is an rtx that says how long they are.
1692 ALIGN is the maximum alignment we can assume they have.
1693 METHOD describes what kind of copy this is, and what mechanisms may be used.
1695 Return the address of the new block, if memcpy is called and returns it, 0 otherwise.  */
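/* An illustrative call (hypothetical operands), copying N bytes between
   two BLKmode MEMs with any mechanism permitted:

       emit_block_move (x, y, GEN_INT (n), BLOCK_OP_NORMAL);  */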
1699 emit_block_move (x, y, size, method)
1701 enum block_op_methods method;
1709 case BLOCK_OP_NORMAL:
1710 may_use_call = true;
1713 case BLOCK_OP_CALL_PARM:
1714 may_use_call = block_move_libcall_safe_for_call_parm ();
1716 /* Make inhibit_defer_pop nonzero around the library call
1717 to force it to pop the arguments right away. */
1721 case BLOCK_OP_NO_LIBCALL:
1722 may_use_call = false;
1729 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1731 if (GET_MODE (x) != BLKmode)
1733 if (GET_MODE (y) != BLKmode)
1736 x = protect_from_queue (x, 1);
1737 y = protect_from_queue (y, 0);
1738 size = protect_from_queue (size, 0);
1740 if (GET_CODE (x) != MEM)
1742 if (GET_CODE (y) != MEM)
1747 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1748 can be incorrect is coming from __builtin_memcpy. */
1749 if (GET_CODE (size) == CONST_INT)
1751 x = shallow_copy_rtx (x);
1752 y = shallow_copy_rtx (y);
1753 set_mem_size (x, size);
1754 set_mem_size (y, size);
1757 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1758 move_by_pieces (x, y, INTVAL (size), align);
1759 else if (emit_block_move_via_movstr (x, y, size, align))
1761 else if (may_use_call)
1762 retval = emit_block_move_via_libcall (x, y, size);
1764 emit_block_move_via_loop (x, y, size, align);
1766 if (method == BLOCK_OP_CALL_PARM)
1772 /* A subroutine of emit_block_move. Returns true if calling the
1773 block move libcall will not clobber any parameters which may have
1774 already been placed on the stack. */
1777 block_move_libcall_safe_for_call_parm ()
1783 /* Check to see whether memcpy takes all register arguments. */
1785 takes_regs_uninit, takes_regs_no, takes_regs_yes
1786 } takes_regs = takes_regs_uninit;
1790 case takes_regs_uninit:
1792 CUMULATIVE_ARGS args_so_far;
1795 fn = emit_block_move_libcall_fn (false);
1796 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0);
1798 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1799 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1801 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1802 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1803 if (!tmp || !REG_P (tmp))
1804 goto fail_takes_regs;
1805 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1806 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
1808 goto fail_takes_regs;
1810 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1813 takes_regs = takes_regs_yes;
1816 case takes_regs_yes:
1820 takes_regs = takes_regs_no;
1831 /* A subroutine of emit_block_move. Expand a movstr pattern;
1832 return true if successful. */
1835 emit_block_move_via_movstr (x, y, size, align)
1839 /* Try the most limited insn first, because there's no point
1840 including more than one in the machine description unless
1841 the more limited one has some advantage. */
1843 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1844 enum machine_mode mode;
1846 /* Since this is a move insn, we don't care about volatility. */
1849 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1850 mode = GET_MODE_WIDER_MODE (mode))
1852 enum insn_code code = movstr_optab[(int) mode];
1853 insn_operand_predicate_fn pred;
1855 if (code != CODE_FOR_nothing
1856 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1857 here because if SIZE is less than the mode mask, as it is
1858 returned by the macro, it will definitely be less than the
1859 actual mode mask. */
1860 && ((GET_CODE (size) == CONST_INT
1861 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1862 <= (GET_MODE_MASK (mode) >> 1)))
1863 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1864 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1865 || (*pred) (x, BLKmode))
1866 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1867 || (*pred) (y, BLKmode))
1868 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1869 || (*pred) (opalign, VOIDmode)))
1872 rtx last = get_last_insn ();
1875 op2 = convert_to_mode (mode, size, 1);
1876 pred = insn_data[(int) code].operand[2].predicate;
1877 if (pred != 0 && ! (*pred) (op2, mode))
1878 op2 = copy_to_mode_reg (mode, op2);
1880 /* ??? When called via emit_block_move_for_call, it'd be
1881 nice if there were some way to inform the backend, so
1882 that it doesn't fail the expansion because it thinks
1883 emitting the libcall would be more efficient. */
1885 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1893 delete_insns_since (last);
1901 /* A subroutine of emit_block_move. Expand a call to memcpy or bcopy.
1902 Return the return value from memcpy, 0 otherwise. */
1905 emit_block_move_via_libcall (dst, src, size)
1908 tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
1909 enum machine_mode size_mode;
1912 /* DST, SRC, or SIZE may have been passed through protect_from_queue.
1914 It is unsafe to save the value generated by protect_from_queue
1915 and reuse it later. Consider what happens if emit_queue is
1916 called before the return value from protect_from_queue is used.
1918 Expansion of the CALL_EXPR below will call emit_queue before
1919 we are finished emitting RTL for argument setup. So if we are
1920 not careful we could get the wrong value for an argument.
1922 To avoid this problem we go ahead and emit code to copy X, Y &
1923 SIZE into new pseudos. We can then place those new pseudos
1924 into an RTL_EXPR and use them later, even after a call to
1927 Note this is not strictly needed for library calls since they
1928 do not call emit_queue before loading their arguments. However,
1929 we may need to have library calls call emit_queue in the future
1930 since failing to do so could cause problems for targets which
1931 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1933 dst = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1934 src = copy_to_mode_reg (Pmode, XEXP (src, 0));
1936 if (TARGET_MEM_FUNCTIONS)
1937 size_mode = TYPE_MODE (sizetype);
1939 size_mode = TYPE_MODE (unsigned_type_node);
1940 size = convert_to_mode (size_mode, size, 1);
1941 size = copy_to_mode_reg (size_mode, size);
1943 /* It is incorrect to use the libcall calling conventions to call
1944 memcpy in this context. This could be a user call to memcpy and
1945 the user may wish to examine the return value from memcpy. For
1946 targets where libcalls and normal calls have different conventions
1947 for returning pointers, we could end up generating incorrect code.
1949 For convenience, we generate the call to bcopy this way as well. */
1951 dst_tree = make_tree (ptr_type_node, dst);
1952 src_tree = make_tree (ptr_type_node, src);
1953 if (TARGET_MEM_FUNCTIONS)
1954 size_tree = make_tree (sizetype, size);
1956 size_tree = make_tree (unsigned_type_node, size);
1958 fn = emit_block_move_libcall_fn (true);
1959 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
1960 if (TARGET_MEM_FUNCTIONS)
1962 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1963 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1967 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1968 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1971 /* Now we have to build up the CALL_EXPR itself. */
1972 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1973 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1974 call_expr, arg_list, NULL_TREE);
1975 TREE_SIDE_EFFECTS (call_expr) = 1;
1977 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1979 /* If we are initializing a readonly value, show the above call
1980 clobbered it. Otherwise, a load from it may erroneously be
1981 hoisted from a loop. */
1982 if (RTX_UNCHANGING_P (dst))
1983 emit_insn (gen_rtx_CLOBBER (VOIDmode, dst));
1985 return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
1988 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1989 for the function we use for block copies. The first time FOR_CALL
1990 is true, we call assemble_external. */
1992 static GTY(()) tree block_move_fn;
1995 emit_block_move_libcall_fn (for_call)
1998 static bool emitted_extern;
1999 tree fn = block_move_fn, args;
2003 if (TARGET_MEM_FUNCTIONS)
2005 fn = get_identifier ("memcpy");
2006 args = build_function_type_list (ptr_type_node, ptr_type_node,
2007 const_ptr_type_node, sizetype,
2012 fn = get_identifier ("bcopy");
2013 args = build_function_type_list (void_type_node, const_ptr_type_node,
2014 ptr_type_node, unsigned_type_node,
2018 fn = build_decl (FUNCTION_DECL, fn, args);
2019 DECL_EXTERNAL (fn) = 1;
2020 TREE_PUBLIC (fn) = 1;
2021 DECL_ARTIFICIAL (fn) = 1;
2022 TREE_NOTHROW (fn) = 1;
2027 if (for_call && !emitted_extern)
2029 emitted_extern = true;
2030 make_decl_rtl (fn, NULL);
2031 assemble_external (fn);
2037 /* A subroutine of emit_block_move. Copy the data via an explicit
2038 loop. This is used only when libcalls are forbidden. */
2039 /* ??? It'd be nice to copy in hunks larger than QImode. */
2042 emit_block_move_via_loop (x, y, size, align)
2044 unsigned int align ATTRIBUTE_UNUSED;
2046 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
2047 enum machine_mode iter_mode;
2049 iter_mode = GET_MODE (size);
2050 if (iter_mode == VOIDmode)
2051 iter_mode = word_mode;
2053 top_label = gen_label_rtx ();
2054 cmp_label = gen_label_rtx ();
2055 iter = gen_reg_rtx (iter_mode);
2057 emit_move_insn (iter, const0_rtx);
2059 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
2060 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
2061 do_pending_stack_adjust ();
2063 emit_note (NULL, NOTE_INSN_LOOP_BEG);
2065 emit_jump (cmp_label);
2066 emit_label (top_label);
2068 tmp = convert_modes (Pmode, iter_mode, iter, true);
2069 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
2070 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
2071 x = change_address (x, QImode, x_addr);
2072 y = change_address (y, QImode, y_addr);
2074 emit_move_insn (x, y);
2076 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
2077 true, OPTAB_LIB_WIDEN);
2079 emit_move_insn (iter, tmp);
2081 emit_note (NULL, NOTE_INSN_LOOP_CONT);
2082 emit_label (cmp_label);
2084 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
2087 emit_note (NULL, NOTE_INSN_LOOP_END);
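/* The RTL emitted above corresponds roughly to this C loop (illustrative
   only; the real copy is byte-by-byte in QImode):

       for (i = 0; i < size; i++)
         ((char *) x)[i] = ((char *) y)[i];  */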
2090 /* Copy all or part of a value X into registers starting at REGNO.
2091 The number of registers to be filled is NREGS. */
2094 move_block_to_reg (regno, x, nregs, mode)
2098 enum machine_mode mode;
2101 #ifdef HAVE_load_multiple
2109 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2110 x = validize_mem (force_const_mem (mode, x));
2112 /* See if the machine can do this with a load multiple insn. */
2113 #ifdef HAVE_load_multiple
2114 if (HAVE_load_multiple)
2116 last = get_last_insn ();
2117 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
2125 delete_insns_since (last);
2129 for (i = 0; i < nregs; i++)
2130 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
2131 operand_subword_force (x, i, mode));
2134 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
2135 The number of registers to be filled is NREGS. SIZE indicates the number
2136 of bytes in the object X. */
2139 move_block_from_reg (regno, x, nregs, size)
2146 #ifdef HAVE_store_multiple
2150 enum machine_mode mode;
2155 /* If SIZE is that of a mode no bigger than a word, just use that
2156 mode's store operation. */
2157 if (size <= UNITS_PER_WORD
2158 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
2160 emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno));
2164 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
2165 to the left before storing to memory. Note that the previous test
2166 doesn't handle all cases (e.g. SIZE == 3). */
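/* Worked example (illustrative): with UNITS_PER_WORD == 4 and SIZE == 3,
   the value is shifted left by (4 - 3) * 8 = 8 bits so the three
   meaningful bytes land at the low-order memory addresses when stored
   on a big-endian machine.  */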
2167 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
2169 rtx tem = operand_subword (x, 0, 1, BLKmode);
2175 shift = expand_shift (LSHIFT_EXPR, word_mode,
2176 gen_rtx_REG (word_mode, regno),
2177 build_int_2 ((UNITS_PER_WORD - size)
2178 * BITS_PER_UNIT, 0), NULL_RTX, 0);
2179 emit_move_insn (tem, shift);
2183 /* See if the machine can do this with a store multiple insn. */
2184 #ifdef HAVE_store_multiple
2185 if (HAVE_store_multiple)
2187 last = get_last_insn ();
2188 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
2196 delete_insns_since (last);
2200 for (i = 0; i < nregs; i++)
2202 rtx tem = operand_subword (x, i, 1, BLKmode);
2207 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
2211 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
2212 ORIG, where ORIG is a non-consecutive group of registers represented by
2213 a PARALLEL. The clone is identical to the original except in that the
2214 original set of registers is replaced by a new set of pseudo registers.
2215 The new set has the same modes as the original set. */
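/* An illustrative group (hypothetical registers): cloning

       (parallel [(expr_list (reg:DI 0) (const_int 0))
                  (expr_list (reg:DI 1) (const_int 8))])

   yields the same PARALLEL with regs 0 and 1 replaced by fresh DImode
   pseudos, the byte offsets 0 and 8 unchanged.  */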
2218 gen_group_rtx (orig)
2224 if (GET_CODE (orig) != PARALLEL)
2227 length = XVECLEN (orig, 0);
2228 tmps = (rtx *) alloca (sizeof (rtx) * length);
2230 /* Skip a NULL entry in first slot. */
2231 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
2236 for (; i < length; i++)
2238 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
2239 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
2241 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
2244 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
2247 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
2248 registers represented by a PARALLEL. SSIZE represents the total size of
2249 block SRC in bytes, or -1 if not known. */
2250 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
2251 the balance will be in what would be the low-order memory addresses, i.e.
2252 left justified for big endian, right justified for little endian. This
2253 happens to be true for the targets currently using this support. If this
2254 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
2258 emit_group_load (dst, orig_src, ssize)
2265 if (GET_CODE (dst) != PARALLEL)
2268 /* Check for a NULL entry, used to indicate that the parameter goes
2269 both on the stack and in registers. */
2270 if (XEXP (XVECEXP (dst, 0, 0), 0))
2275 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
2277 /* Process the pieces. */
2278 for (i = start; i < XVECLEN (dst, 0); i++)
2280 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
2281 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
2282 unsigned int bytelen = GET_MODE_SIZE (mode);
2285 /* Handle trailing fragments that run over the size of the struct. */
2286 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2288 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2289 bytelen = ssize - bytepos;
2294 /* If we won't be loading directly from memory, protect the real source
2295 from strange tricks we might play; but make sure that the source can
2296 be loaded directly into the destination. */
2298 if (GET_CODE (orig_src) != MEM
2299 && (!CONSTANT_P (orig_src)
2300 || (GET_MODE (orig_src) != mode
2301 && GET_MODE (orig_src) != VOIDmode)))
2303 if (GET_MODE (orig_src) == VOIDmode)
2304 src = gen_reg_rtx (mode);
2306 src = gen_reg_rtx (GET_MODE (orig_src));
2308 emit_move_insn (src, orig_src);
2311 /* Optimize the access just a bit. */
2312 if (GET_CODE (src) == MEM
2313 && MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode)
2314 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2315 && bytelen == GET_MODE_SIZE (mode))
2317 tmps[i] = gen_reg_rtx (mode);
2318 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
2320 else if (GET_CODE (src) == CONCAT)
2322 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
2323 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
2325 if ((bytepos == 0 && bytelen == slen0)
2326 || (bytepos != 0 && bytepos + bytelen <= slen))
2328 /* The following assumes that the concatenated objects all
2329 have the same size. In this case, a simple calculation
2330 can be used to determine the object and the bit field to be extracted. */
2332 tmps[i] = XEXP (src, bytepos / slen0);
2333 if (! CONSTANT_P (tmps[i])
2334 && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
2335 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
2336 (bytepos % slen0) * BITS_PER_UNIT,
2337 1, NULL_RTX, mode, mode, ssize);
2339 else if (bytepos == 0)
2341 rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
2342 emit_move_insn (mem, src);
2343 tmps[i] = adjust_address (mem, mode, 0);
2348 else if (CONSTANT_P (src)
2349 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2352 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2353 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2356 if (BYTES_BIG_ENDIAN && shift)
2357 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2358 tmps[i], 0, OPTAB_WIDEN);
2363 /* Copy the extracted pieces into the proper (probable) hard regs. */
2364 for (i = start; i < XVECLEN (dst, 0); i++)
2365 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
2368 /* Emit code to move a block SRC to block DST, where SRC and DST are
2369 non-consecutive groups of registers, each represented by a PARALLEL. */
2372 emit_group_move (dst, src)
2377 if (GET_CODE (src) != PARALLEL
2378 || GET_CODE (dst) != PARALLEL
2379 || XVECLEN (src, 0) != XVECLEN (dst, 0))
2382 /* Skip first entry if NULL. */
2383 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
2384 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
2385 XEXP (XVECEXP (src, 0, i), 0));
2388 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2389 registers represented by a PARALLEL. SSIZE represents the total size of
2390 block DST, or -1 if not known. */
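/* Illustrative sketch, not part of the original source: the inverse of
   emit_group_load above; spilling a register group SRC back into a
   16-byte BLKmode MEM could be written

     emit_group_store (mem, src, 16);  */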
2393 emit_group_store (orig_dst, src, ssize)
2400 if (GET_CODE (src) != PARALLEL)
2403 /* Check for a NULL entry, used to indicate that the parameter goes
2404 both on the stack and in registers. */
2405 if (XEXP (XVECEXP (src, 0, 0), 0))
2410 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2412 /* Copy the (probable) hard regs into pseudos. */
2413 for (i = start; i < XVECLEN (src, 0); i++)
2415 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2416 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2417 emit_move_insn (tmps[i], reg);
2421 /* If we won't be storing directly into memory, protect the real destination
2422 from strange tricks we might play. */
2424 if (GET_CODE (dst) == PARALLEL)
2428 /* We can get a PARALLEL dst if there is a conditional expression in
2429 a return statement. In that case, the dst and src are the same,
2430 so no action is necessary. */
2431 if (rtx_equal_p (dst, src))
2434 /* It is unclear if we can ever reach here, but we may as well handle
2435 it. Allocate a temporary, and split this into a store/load to/from the temporary. */
2438 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2439 emit_group_store (temp, src, ssize);
2440 emit_group_load (dst, temp, ssize);
2443 else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
2445 dst = gen_reg_rtx (GET_MODE (orig_dst));
2446 /* Make life a bit easier for combine. */
2447 emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
2450 /* Process the pieces. */
2451 for (i = start; i < XVECLEN (src, 0); i++)
2453 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2454 enum machine_mode mode = GET_MODE (tmps[i]);
2455 unsigned int bytelen = GET_MODE_SIZE (mode);
2458 /* Handle trailing fragments that run over the size of the struct. */
2459 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2461 if (BYTES_BIG_ENDIAN)
2463 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2464 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2465 tmps[i], 0, OPTAB_WIDEN);
2467 bytelen = ssize - bytepos;
2470 if (GET_CODE (dst) == CONCAT)
2472 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2473 dest = XEXP (dst, 0);
2474 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2476 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2477 dest = XEXP (dst, 1);
2479 else if (bytepos == 0 && XVECLEN (src, 0))
2481 dest = assign_stack_temp (GET_MODE (dest),
2482 GET_MODE_SIZE (GET_MODE (dest)), 0);
2483 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2492 /* Optimize the access just a bit. */
2493 if (GET_CODE (dest) == MEM
2494 && MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode)
2495 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2496 && bytelen == GET_MODE_SIZE (mode))
2497 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2499 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2500 mode, tmps[i], ssize);
2505 /* Copy from the pseudo into the (probable) hard reg. */
2506 if (orig_dst != dst)
2507 emit_move_insn (orig_dst, dst);
2510 /* Generate code to copy a BLKmode object of TYPE out of a
2511 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2512 is null, a stack temporary is created. TGTBLK is returned.
2514 The primary purpose of this routine is to handle functions
2515 that return BLKmode structures in registers. Some machines
2516 (the PA for example) want to return all small structures
2517 in registers regardless of the structure's alignment. */
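/* Illustrative sketch, not part of the original source: a caller handling
   a 6-byte struct returned in registers might write

     rtx blk = copy_blkmode_from_reg (NULL_RTX, return_reg, type);

   where TYPE is the struct's tree type and RETURN_REG is a stand-in for
   the target's return register; because TGTBLK is null, a stack
   temporary is created and returned.  */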
2520 copy_blkmode_from_reg (tgtblk, srcreg, type)
2525 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2526 rtx src = NULL, dst = NULL;
2527 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2528 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2532 tgtblk = assign_temp (build_qualified_type (type,
2534 | TYPE_QUAL_CONST)),
2536 preserve_temp_slots (tgtblk);
2539 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2540 into a new pseudo which is a full word. */
2542 if (GET_MODE (srcreg) != BLKmode
2543 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2544 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2546 /* Structures whose size is not a multiple of a word are aligned
2547 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2548 machine, this means we must skip the empty high order bytes when
2549 calculating the bit offset. */
2550 if (BYTES_BIG_ENDIAN
2551 && bytes % UNITS_PER_WORD)
2552 big_endian_correction
2553 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2555 /* Copy the structure BITSIZE bits at a time.
2557 We could probably emit more efficient code for machines which do not use
2558 strict alignment, but it doesn't seem worth the effort at the current time. */
2560 for (bitpos = 0, xbitpos = big_endian_correction;
2561 bitpos < bytes * BITS_PER_UNIT;
2562 bitpos += bitsize, xbitpos += bitsize)
2564 /* We need a new source operand each time xbitpos is on a
2565 word boundary and when xbitpos == big_endian_correction
2566 (the first time through). */
2567 if (xbitpos % BITS_PER_WORD == 0
2568 || xbitpos == big_endian_correction)
2569 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2572 /* We need a new destination operand each time bitpos is on a word boundary. */
2574 if (bitpos % BITS_PER_WORD == 0)
2575 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2577 /* Use xbitpos for the source extraction (right justified) and
2578 bitpos for the destination store (left justified). */
2579 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2580 extract_bit_field (src, bitsize,
2581 xbitpos % BITS_PER_WORD, 1,
2582 NULL_RTX, word_mode, word_mode,
2590 /* Add a USE expression for REG to the (possibly empty) list pointed
2591 to by CALL_FUSAGE. REG must denote a hard register. */
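/* Illustrative sketch, not part of the original source: assuming hard
   register 0 is valid in SImode on the target,

     rtx fusage = NULL_RTX;
     use_reg (&fusage, gen_rtx_REG (SImode, 0));

   builds the USE chain later attached to a CALL_INSN.  */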
2594 use_reg (call_fusage, reg)
2595 rtx *call_fusage, reg;
2597 if (GET_CODE (reg) != REG
2598 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2602 = gen_rtx_EXPR_LIST (VOIDmode,
2603 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2606 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2607 starting at REGNO. All of these registers must be hard registers. */
2610 use_regs (call_fusage, regno, nregs)
2617 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2620 for (i = 0; i < nregs; i++)
2621 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2624 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2625 PARALLEL REGS. This is for calls that pass values in multiple
2626 non-contiguous locations. The Irix 6 ABI has examples of this. */
2629 use_group_regs (call_fusage, regs)
2635 for (i = 0; i < XVECLEN (regs, 0); i++)
2637 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2639 /* A NULL entry means the parameter goes both on the stack and in
2640 registers. This can also be a MEM for targets that pass values
2641 partially on the stack and partially in registers. */
2642 if (reg != 0 && GET_CODE (reg) == REG)
2643 use_reg (call_fusage, reg);
2648 /* Determine whether the LEN bytes generated by CONSTFUN can be
2649 stored to memory using several move instructions. CONSTFUNDATA is
2650 a pointer which will be passed as argument in every CONSTFUN call.
2651 ALIGN is maximum alignment we can assume. Return nonzero if a
2652 call to store_by_pieces should succeed. */
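/* Illustrative sketch, not part of the original source: a typical CONSTFUN
   reads bytes of a constant string into an rtx of the requested mode,
   in the manner of builtin_memcpy_read_str in builtins.c:

     static rtx
     read_str_1 (data, offset, mode)
          PTR data;
          HOST_WIDE_INT offset;
          enum machine_mode mode;
     {
       return c_readstr ((const char *) data + offset, mode);
     }

   can_store_by_pieces (len, read_str_1, (PTR) str, align) then asks
   whether LEN bytes produced this way can be stored with plain moves.  */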
2655 can_store_by_pieces (len, constfun, constfundata, align)
2656 unsigned HOST_WIDE_INT len;
2657 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2661 unsigned HOST_WIDE_INT max_size, l;
2662 HOST_WIDE_INT offset = 0;
2663 enum machine_mode mode, tmode;
2664 enum insn_code icode;
2668 if (! STORE_BY_PIECES_P (len, align))
2671 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2672 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2673 align = MOVE_MAX * BITS_PER_UNIT;
2675 /* We would first store what we can in the largest integer mode, then go to
2676 successively smaller modes. */
2679 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2684 max_size = STORE_MAX_PIECES + 1;
2685 while (max_size > 1)
2687 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2688 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2689 if (GET_MODE_SIZE (tmode) < max_size)
2692 if (mode == VOIDmode)
2695 icode = mov_optab->handlers[(int) mode].insn_code;
2696 if (icode != CODE_FOR_nothing
2697 && align >= GET_MODE_ALIGNMENT (mode))
2699 unsigned int size = GET_MODE_SIZE (mode);
2706 cst = (*constfun) (constfundata, offset, mode);
2707 if (!LEGITIMATE_CONSTANT_P (cst))
2717 max_size = GET_MODE_SIZE (mode);
2720 /* The code above should have handled everything. */
2728 /* Generate several move instructions to store LEN bytes generated by
2729 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2730 pointer which will be passed as argument in every CONSTFUN call.
2731 ALIGN is maximum alignment we can assume. */
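/* Illustrative sketch, not part of the original source: paired with the
   hypothetical READ_STR_1 callback above, in the manner of
   expand_builtin_memcpy:

     if (can_store_by_pieces (len, read_str_1, (PTR) str, align))
       store_by_pieces (dest_mem, len, read_str_1, (PTR) str, align);  */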
2734 store_by_pieces (to, len, constfun, constfundata, align)
2736 unsigned HOST_WIDE_INT len;
2737 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2741 struct store_by_pieces data;
2743 if (! STORE_BY_PIECES_P (len, align))
2745 to = protect_from_queue (to, 1);
2746 data.constfun = constfun;
2747 data.constfundata = constfundata;
2750 store_by_pieces_1 (&data, align);
2753 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2754 rtx with BLKmode). The caller must pass TO through protect_from_queue
2755 before calling. ALIGN is maximum alignment we can assume. */
2758 clear_by_pieces (to, len, align)
2760 unsigned HOST_WIDE_INT len;
2763 struct store_by_pieces data;
2765 data.constfun = clear_by_pieces_1;
2766 data.constfundata = NULL;
2769 store_by_pieces_1 (&data, align);
2772 /* Callback routine for clear_by_pieces.
2773 Return const0_rtx unconditionally. */
2776 clear_by_pieces_1 (data, offset, mode)
2777 PTR data ATTRIBUTE_UNUSED;
2778 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2779 enum machine_mode mode ATTRIBUTE_UNUSED;
2784 /* Subroutine of clear_by_pieces and store_by_pieces.
2785 Generate several move instructions to store LEN bytes of block TO. (A MEM
2786 rtx with BLKmode). The caller must pass TO through protect_from_queue
2787 before calling. ALIGN is maximum alignment we can assume. */
2790 store_by_pieces_1 (data, align)
2791 struct store_by_pieces *data;
2794 rtx to_addr = XEXP (data->to, 0);
2795 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
2796 enum machine_mode mode = VOIDmode, tmode;
2797 enum insn_code icode;
2800 data->to_addr = to_addr;
2802 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2803 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2805 data->explicit_inc_to = 0;
2807 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2809 data->offset = data->len;
2811 /* If storing requires more than two move insns,
2812 copy addresses to registers (to make displacements shorter)
2813 and use post-increment if available. */
2814 if (!data->autinc_to
2815 && move_by_pieces_ninsns (data->len, align) > 2)
2817 /* Determine the main mode we'll be using. */
2818 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2819 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2820 if (GET_MODE_SIZE (tmode) < max_size)
2823 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2825 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2826 data->autinc_to = 1;
2827 data->explicit_inc_to = -1;
2830 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2831 && ! data->autinc_to)
2833 data->to_addr = copy_addr_to_reg (to_addr);
2834 data->autinc_to = 1;
2835 data->explicit_inc_to = 1;
2838 if (!data->autinc_to && CONSTANT_P (to_addr))
2839 data->to_addr = copy_addr_to_reg (to_addr);
2842 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2843 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2844 align = MOVE_MAX * BITS_PER_UNIT;
2846 /* First store what we can in the largest integer mode, then go to
2847 successively smaller modes. */
2849 while (max_size > 1)
2851 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2852 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2853 if (GET_MODE_SIZE (tmode) < max_size)
2856 if (mode == VOIDmode)
2859 icode = mov_optab->handlers[(int) mode].insn_code;
2860 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2861 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2863 max_size = GET_MODE_SIZE (mode);
2866 /* The code above should have handled everything. */
2871 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2872 with move instructions for mode MODE. GENFUN is the gen_... function
2873 to make a move insn for that mode. DATA has all the other info. */
2876 store_by_pieces_2 (genfun, mode, data)
2877 rtx (*genfun) PARAMS ((rtx, ...));
2878 enum machine_mode mode;
2879 struct store_by_pieces *data;
2881 unsigned int size = GET_MODE_SIZE (mode);
2884 while (data->len >= size)
2887 data->offset -= size;
2889 if (data->autinc_to)
2890 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2893 to1 = adjust_address (data->to, mode, data->offset);
2895 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2896 emit_insn (gen_add2_insn (data->to_addr,
2897 GEN_INT (-(HOST_WIDE_INT) size)));
2899 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2900 emit_insn ((*genfun) (to1, cst));
2902 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2903 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2905 if (! data->reverse)
2906 data->offset += size;
2912 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2913 its length in bytes. */
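/* Illustrative sketch, not part of the original source: zeroing a 32-byte
   stack temporary,

     rtx mem = assign_stack_temp (BLKmode, 32, 0);
     clear_storage (mem, GEN_INT (32));

   which picks clear_by_pieces, a clrstr pattern, or a library call as
   appropriate.  */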
2916 clear_storage (object, size)
2921 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2922 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2924 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2925 just move a zero. Otherwise, do this a piece at a time. */
2926 if (GET_MODE (object) != BLKmode
2927 && GET_CODE (size) == CONST_INT
2928 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
2929 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2932 object = protect_from_queue (object, 1);
2933 size = protect_from_queue (size, 0);
2935 if (GET_CODE (size) == CONST_INT
2936 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2937 clear_by_pieces (object, INTVAL (size), align);
2938 else if (clear_storage_via_clrstr (object, size, align))
2941 retval = clear_storage_via_libcall (object, size);
2947 /* A subroutine of clear_storage. Expand a clrstr pattern;
2948 return true if successful. */
2951 clear_storage_via_clrstr (object, size, align)
2955 /* Try the most limited insn first, because there's no point
2956 including more than one in the machine description unless
2957 the more limited one has some advantage. */
2959 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2960 enum machine_mode mode;
2962 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2963 mode = GET_MODE_WIDER_MODE (mode))
2965 enum insn_code code = clrstr_optab[(int) mode];
2966 insn_operand_predicate_fn pred;
2968 if (code != CODE_FOR_nothing
2969 /* We don't need MODE to be narrower than
2970 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2971 the mode mask, as it is returned by the macro, it will
2972 definitely be less than the actual mode mask. */
2973 && ((GET_CODE (size) == CONST_INT
2974 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2975 <= (GET_MODE_MASK (mode) >> 1)))
2976 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2977 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2978 || (*pred) (object, BLKmode))
2979 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2980 || (*pred) (opalign, VOIDmode)))
2983 rtx last = get_last_insn ();
2986 op1 = convert_to_mode (mode, size, 1);
2987 pred = insn_data[(int) code].operand[1].predicate;
2988 if (pred != 0 && ! (*pred) (op1, mode))
2989 op1 = copy_to_mode_reg (mode, op1);
2991 pat = GEN_FCN ((int) code) (object, op1, opalign);
2998 delete_insns_since (last);
3005 /* A subroutine of clear_storage. Expand a call to memset or bzero.
3006 Return the return value of memset, 0 otherwise. */
3009 clear_storage_via_libcall (object, size)
3012 tree call_expr, arg_list, fn, object_tree, size_tree;
3013 enum machine_mode size_mode;
3016 /* OBJECT or SIZE may have been passed through protect_from_queue.
3018 It is unsafe to save the value generated by protect_from_queue
3019 and reuse it later. Consider what happens if emit_queue is
3020 called before the return value from protect_from_queue is used.
3022 Expansion of the CALL_EXPR below will call emit_queue before
3023 we are finished emitting RTL for argument setup. So if we are
3024 not careful we could get the wrong value for an argument.
3026 To avoid this problem we go ahead and emit code to copy OBJECT
3027 and SIZE into new pseudos. We can then place those new pseudos
3028 into an RTL_EXPR and use them later, even after a call to
3031 Note this is not strictly needed for library calls since they
3032 do not call emit_queue before loading their arguments. However,
3033 we may need to have library calls call emit_queue in the future
3034 since failing to do so could cause problems for targets which
3035 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
3037 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
3039 if (TARGET_MEM_FUNCTIONS)
3040 size_mode = TYPE_MODE (sizetype);
3042 size_mode = TYPE_MODE (unsigned_type_node);
3043 size = convert_to_mode (size_mode, size, 1);
3044 size = copy_to_mode_reg (size_mode, size);
3046 /* It is incorrect to use the libcall calling conventions to call
3047 memset in this context. This could be a user call to memset and
3048 the user may wish to examine the return value from memset. For
3049 targets where libcalls and normal calls have different conventions
3050 for returning pointers, we could end up generating incorrect code.
3052 For convenience, we generate the call to bzero this way as well. */
3054 object_tree = make_tree (ptr_type_node, object);
3055 if (TARGET_MEM_FUNCTIONS)
3056 size_tree = make_tree (sizetype, size);
3058 size_tree = make_tree (unsigned_type_node, size);
3060 fn = clear_storage_libcall_fn (true);
3061 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
3062 if (TARGET_MEM_FUNCTIONS)
3063 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
3064 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
3066 /* Now we have to build up the CALL_EXPR itself. */
3067 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
3068 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
3069 call_expr, arg_list, NULL_TREE);
3070 TREE_SIDE_EFFECTS (call_expr) = 1;
3072 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
3074 /* If we are initializing a readonly value, show the above call
3075 clobbered it. Otherwise, a load from it may erroneously be
3076 hoisted from a loop. */
3077 if (RTX_UNCHANGING_P (object))
3078 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
3080 return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
3083 /* A subroutine of clear_storage_via_libcall. Create the tree node
3084 for the function we use for block clears. The first time FOR_CALL
3085 is true, we call assemble_external. */
3087 static GTY(()) tree block_clear_fn;
3090 clear_storage_libcall_fn (for_call)
3093 static bool emitted_extern;
3094 tree fn = block_clear_fn, args;
3098 if (TARGET_MEM_FUNCTIONS)
3100 fn = get_identifier ("memset");
3101 args = build_function_type_list (ptr_type_node, ptr_type_node,
3102 integer_type_node, sizetype,
3107 fn = get_identifier ("bzero");
3108 args = build_function_type_list (void_type_node, ptr_type_node,
3109 unsigned_type_node, NULL_TREE);
3112 fn = build_decl (FUNCTION_DECL, fn, args);
3113 DECL_EXTERNAL (fn) = 1;
3114 TREE_PUBLIC (fn) = 1;
3115 DECL_ARTIFICIAL (fn) = 1;
3116 TREE_NOTHROW (fn) = 1;
3118 block_clear_fn = fn;
3121 if (for_call && !emitted_extern)
3123 emitted_extern = true;
3124 make_decl_rtl (fn, NULL);
3125 assemble_external (fn);
3131 /* Generate code to copy Y into X.
3132 Both Y and X must have the same mode, except that
3133 Y can be a constant with VOIDmode.
3134 This mode cannot be BLKmode; use emit_block_move for that.
3136 Return the last instruction emitted. */
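/* Illustrative sketch, not part of the original source: moving a constant
   into a fresh pseudo,

     rtx reg = gen_reg_rtx (SImode);
     emit_move_insn (reg, GEN_INT (42));

   the constant is spilled to memory first if it is not legitimate for
   the target.  */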
3139 emit_move_insn (x, y)
3142 enum machine_mode mode = GET_MODE (x);
3143 rtx y_cst = NULL_RTX;
3146 x = protect_from_queue (x, 1);
3147 y = protect_from_queue (y, 0);
3149 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
3152 /* Never force constant_p_rtx to memory. */
3153 if (GET_CODE (y) == CONSTANT_P_RTX)
3155 else if (CONSTANT_P (y))
3158 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3159 && (last_insn = compress_float_constant (x, y)))
3162 if (!LEGITIMATE_CONSTANT_P (y))
3165 y = force_const_mem (mode, y);
3167 /* If the target's cannot_force_const_mem prevented the spill,
3168 assume that the target's move expanders will also take care
3169 of the non-legitimate constant. */
3175 /* If X or Y are memory references, verify that their addresses are valid for the machine. */
3177 if (GET_CODE (x) == MEM
3178 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3179 && ! push_operand (x, GET_MODE (x)))
3181 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
3182 x = validize_mem (x);
3184 if (GET_CODE (y) == MEM
3185 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3187 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
3188 y = validize_mem (y);
3190 if (mode == BLKmode)
3193 last_insn = emit_move_insn_1 (x, y);
3195 if (y_cst && GET_CODE (x) == REG)
3196 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3201 /* Low level part of emit_move_insn.
3202 Called just like emit_move_insn, but assumes X and Y
3203 are basically valid. */
3206 emit_move_insn_1 (x, y)
3209 enum machine_mode mode = GET_MODE (x);
3210 enum machine_mode submode;
3211 enum mode_class class = GET_MODE_CLASS (mode);
3213 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
3216 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
3218 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
3220 /* Expand complex moves by moving real part and imag part, if possible. */
3221 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
3222 && BLKmode != (submode = GET_MODE_INNER (mode))
3223 && (mov_optab->handlers[(int) submode].insn_code
3224 != CODE_FOR_nothing))
3226 /* Don't split destination if it is a stack push. */
3227 int stack = push_operand (x, GET_MODE (x));
3229 #ifdef PUSH_ROUNDING
3230 /* In case we output to the stack, but the size is smaller than the machine
3231 can push exactly, we need to use move instructions. */
3233 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
3234 != GET_MODE_SIZE (submode)))
3237 HOST_WIDE_INT offset1, offset2;
3239 /* Do not use anti_adjust_stack, since we don't want to update
3240 stack_pointer_delta. */
3241 temp = expand_binop (Pmode,
3242 #ifdef STACK_GROWS_DOWNWARD
3250 (GET_MODE_SIZE (GET_MODE (x)))),
3251 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3253 if (temp != stack_pointer_rtx)
3254 emit_move_insn (stack_pointer_rtx, temp);
3256 #ifdef STACK_GROWS_DOWNWARD
3258 offset2 = GET_MODE_SIZE (submode);
3260 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
3261 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
3262 + GET_MODE_SIZE (submode));
3265 emit_move_insn (change_address (x, submode,
3266 gen_rtx_PLUS (Pmode,
3268 GEN_INT (offset1))),
3269 gen_realpart (submode, y));
3270 emit_move_insn (change_address (x, submode,
3271 gen_rtx_PLUS (Pmode,
3273 GEN_INT (offset2))),
3274 gen_imagpart (submode, y));
3278 /* If this is a stack push, push the highpart first, so it
3279 will be in the argument order.
3281 In that case, change_address is used only to convert
3282 the mode, not to change the address. */
3285 /* Note that the real part always precedes the imag part in memory
3286 regardless of the machine's endianness. */
3287 #ifdef STACK_GROWS_DOWNWARD
3288 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3289 (gen_rtx_MEM (submode, XEXP (x, 0)),
3290 gen_imagpart (submode, y)));
3291 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3292 (gen_rtx_MEM (submode, XEXP (x, 0)),
3293 gen_realpart (submode, y)));
3295 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3296 (gen_rtx_MEM (submode, XEXP (x, 0)),
3297 gen_realpart (submode, y)));
3298 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3299 (gen_rtx_MEM (submode, XEXP (x, 0)),
3300 gen_imagpart (submode, y)));
3305 rtx realpart_x, realpart_y;
3306 rtx imagpart_x, imagpart_y;
3308 /* If this is a complex value with each part being smaller than a
3309 word, the usual calling sequence will likely pack the pieces into
3310 a single register. Unfortunately, SUBREG of hard registers only
3311 deals in terms of words, so we have a problem converting input
3312 arguments to the CONCAT of two registers that is used elsewhere
3313 for complex values. If this is before reload, we can copy it into
3314 memory and reload. FIXME, we should see about using extract and
3315 insert on integer registers, but complex short and complex char
3316 variables should be rarely used. */
3317 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
3318 && (reload_in_progress | reload_completed) == 0)
3321 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
3323 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
3325 if (packed_dest_p || packed_src_p)
3327 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
3328 ? MODE_FLOAT : MODE_INT);
3330 enum machine_mode reg_mode
3331 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
3333 if (reg_mode != BLKmode)
3335 rtx mem = assign_stack_temp (reg_mode,
3336 GET_MODE_SIZE (mode), 0);
3337 rtx cmem = adjust_address (mem, mode, 0);
3340 = N_("function using short complex types cannot be inline");
3344 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
3346 emit_move_insn_1 (cmem, y);
3347 return emit_move_insn_1 (sreg, mem);
3351 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
3353 emit_move_insn_1 (mem, sreg);
3354 return emit_move_insn_1 (x, cmem);
3360 realpart_x = gen_realpart (submode, x);
3361 realpart_y = gen_realpart (submode, y);
3362 imagpart_x = gen_imagpart (submode, x);
3363 imagpart_y = gen_imagpart (submode, y);
3365 /* Show the output dies here. This is necessary for SUBREGs
3366 of pseudos since we cannot track their lifetimes correctly;
3367 hard regs shouldn't appear here except as return values.
3368 We never want to emit such a clobber after reload. */
3370 && ! (reload_in_progress || reload_completed)
3371 && (GET_CODE (realpart_x) == SUBREG
3372 || GET_CODE (imagpart_x) == SUBREG))
3373 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3375 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3376 (realpart_x, realpart_y));
3377 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3378 (imagpart_x, imagpart_y));
3381 return get_last_insn ();
3384 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
3385 find a mode to do it in. If we have a movcc, use it. Otherwise,
3386 find the MODE_INT mode of the same width. */
3387 else if (GET_MODE_CLASS (mode) == MODE_CC
3388 && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
3390 enum insn_code insn_code;
3391 enum machine_mode tmode = VOIDmode;
3395 && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
3398 for (tmode = QImode; tmode != VOIDmode;
3399 tmode = GET_MODE_WIDER_MODE (tmode))
3400 if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
3403 if (tmode == VOIDmode)
3406 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
3407 may call change_address which is not appropriate if we were
3408 called when a reload was in progress. We don't have to worry
3409 about changing the address since the size in bytes is supposed to
3410 be the same. Copy the MEM to change the mode and move any
3411 substitutions from the old MEM to the new one. */
3413 if (reload_in_progress)
3415 x = gen_lowpart_common (tmode, x1);
3416 if (x == 0 && GET_CODE (x1) == MEM)
3418 x = adjust_address_nv (x1, tmode, 0);
3419 copy_replacements (x1, x);
3422 y = gen_lowpart_common (tmode, y1);
3423 if (y == 0 && GET_CODE (y1) == MEM)
3425 y = adjust_address_nv (y1, tmode, 0);
3426 copy_replacements (y1, y);
3431 x = gen_lowpart (tmode, x);
3432 y = gen_lowpart (tmode, y);
3435 insn_code = mov_optab->handlers[(int) tmode].insn_code;
3436 return emit_insn (GEN_FCN (insn_code) (x, y));
3439 /* This will handle any multi-word or full-word mode that lacks a move_insn
3440 pattern. However, you will get better code if you define such patterns,
3441 even if they must turn into multiple assembler instructions. */
3442 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
3449 #ifdef PUSH_ROUNDING
3451 /* If X is a push on the stack, do the push now and replace
3452 X with a reference to the stack pointer. */
3453 if (push_operand (x, GET_MODE (x)))
3458 /* Do not use anti_adjust_stack, since we don't want to update
3459 stack_pointer_delta. */
3460 temp = expand_binop (Pmode,
3461 #ifdef STACK_GROWS_DOWNWARD
3469 (GET_MODE_SIZE (GET_MODE (x)))),
3470 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3472 if (temp != stack_pointer_rtx)
3473 emit_move_insn (stack_pointer_rtx, temp);
3475 code = GET_CODE (XEXP (x, 0));
3477 /* Just hope that small offsets off SP are OK. */
3478 if (code == POST_INC)
3479 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3480 GEN_INT (-((HOST_WIDE_INT)
3481 GET_MODE_SIZE (GET_MODE (x)))));
3482 else if (code == POST_DEC)
3483 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3484 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3486 temp = stack_pointer_rtx;
3488 x = change_address (x, VOIDmode, temp);
3492 /* If we are in reload, see if either operand is a MEM whose address
3493 is scheduled for replacement. */
3494 if (reload_in_progress && GET_CODE (x) == MEM
3495 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3496 x = replace_equiv_address_nv (x, inner);
3497 if (reload_in_progress && GET_CODE (y) == MEM
3498 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3499 y = replace_equiv_address_nv (y, inner);
3505 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3508 rtx xpart = operand_subword (x, i, 1, mode);
3509 rtx ypart = operand_subword (y, i, 1, mode);
3511 /* If we can't get a part of Y, put Y into memory if it is a
3512 constant. Otherwise, force it into a register. If we still
3513 can't get a part of Y, abort. */
3514 if (ypart == 0 && CONSTANT_P (y))
3516 y = force_const_mem (mode, y);
3517 ypart = operand_subword (y, i, 1, mode);
3519 else if (ypart == 0)
3520 ypart = operand_subword_force (y, i, mode);
3522 if (xpart == 0 || ypart == 0)
3525 need_clobber |= (GET_CODE (xpart) == SUBREG);
3527 last_insn = emit_move_insn (xpart, ypart);
3533 /* Show the output dies here. This is necessary for SUBREGs
3534 of pseudos since we cannot track their lifetimes correctly;
3535 hard regs shouldn't appear here except as return values.
3536 We never want to emit such a clobber after reload. */
3538 && ! (reload_in_progress || reload_completed)
3539 && need_clobber != 0)
3540 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3550 /* If Y is representable exactly in a narrower mode, and the target can
3551 perform the extension directly from constant or memory, then emit the
3552 move as an extension. */
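/* Illustrative sketch, not part of the original source: a DFmode store of
   1.0, which is exactly representable in SFmode, can be emitted as an
   SFmode constant plus a float_extend, shrinking the constant pool; a
   value such as 0.1 is inexact in SFmode and is rejected by the
   exact_real_truncate check below.  */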
3555 compress_float_constant (x, y)
3558 enum machine_mode dstmode = GET_MODE (x);
3559 enum machine_mode orig_srcmode = GET_MODE (y);
3560 enum machine_mode srcmode;
3563 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3565 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3566 srcmode != orig_srcmode;
3567 srcmode = GET_MODE_WIDER_MODE (srcmode))
3570 rtx trunc_y, last_insn;
3572 /* Skip if the target can't extend this way. */
3573 ic = can_extend_p (dstmode, srcmode, 0);
3574 if (ic == CODE_FOR_nothing)
3577 /* Skip if the narrowed value isn't exact. */
3578 if (! exact_real_truncate (srcmode, &r))
3581 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3583 if (LEGITIMATE_CONSTANT_P (trunc_y))
3585 /* Skip if the target needs extra instructions to perform the extension. */
3587 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3590 else if (float_extend_from_mem[dstmode][srcmode])
3591 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3595 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3596 last_insn = get_last_insn ();
3598 if (GET_CODE (x) == REG)
3599 REG_NOTES (last_insn)
3600 = gen_rtx_EXPR_LIST (REG_EQUAL, y, REG_NOTES (last_insn));
3608 /* Pushing data onto the stack. */
3610 /* Push a block of length SIZE (perhaps variable)
3611 and return an rtx to address the beginning of the block.
3612 Note that it is not possible for the value returned to be a QUEUED.
3613 The value may be virtual_outgoing_args_rtx.
3615 EXTRA is the number of bytes of padding to push in addition to SIZE.
3616 BELOW nonzero means this padding comes at low addresses;
3617 otherwise, the padding comes at high addresses. */
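/* Illustrative sketch, not part of the original source: reserving 64
   bytes of stack outgoing-argument space with no extra padding,

     rtx addr = push_block (GEN_INT (64), 0, 0);
     rtx blk = gen_rtx_MEM (BLKmode, addr);

   BLK can then be filled with a block move.  */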
3620 push_block (size, extra, below)
3626 size = convert_modes (Pmode, ptr_mode, size, 1);
3627 if (CONSTANT_P (size))
3628 anti_adjust_stack (plus_constant (size, extra));
3629 else if (GET_CODE (size) == REG && extra == 0)
3630 anti_adjust_stack (size);
3633 temp = copy_to_mode_reg (Pmode, size);
3635 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3636 temp, 0, OPTAB_LIB_WIDEN);
3637 anti_adjust_stack (temp);
3640 #ifndef STACK_GROWS_DOWNWARD
3646 temp = virtual_outgoing_args_rtx;
3647 if (extra != 0 && below)
3648 temp = plus_constant (temp, extra);
3652 if (GET_CODE (size) == CONST_INT)
3653 temp = plus_constant (virtual_outgoing_args_rtx,
3654 -INTVAL (size) - (below ? 0 : extra));
3655 else if (extra != 0 && !below)
3656 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3657 negate_rtx (Pmode, plus_constant (size, extra)));
3659 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3660 negate_rtx (Pmode, size));
3663 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3666 #ifdef PUSH_ROUNDING
3668 /* Emit single push insn. */
3671 emit_single_push_insn (mode, x, type)
3673 enum machine_mode mode;
3677 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3679 enum insn_code icode;
3680 insn_operand_predicate_fn pred;
3682 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3683 /* If there is a push pattern, use it. Otherwise try the old way of throwing
3684 a MEM representing the push operation to the move expander. */
3685 icode = push_optab->handlers[(int) mode].insn_code;
3686 if (icode != CODE_FOR_nothing)
3688 if (((pred = insn_data[(int) icode].operand[0].predicate)
3689 && !((*pred) (x, mode))))
3690 x = force_reg (mode, x);
3691 emit_insn (GEN_FCN (icode) (x));
3694 if (GET_MODE_SIZE (mode) == rounded_size)
3695 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3698 #ifdef STACK_GROWS_DOWNWARD
3699 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3700 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3702 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3703 GEN_INT (rounded_size));
3705 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3708 dest = gen_rtx_MEM (mode, dest_addr);
3712 set_mem_attributes (dest, type, 1);
3714 if (flag_optimize_sibling_calls)
3715 /* Function incoming arguments may overlap with sibling call
3716 outgoing arguments and we cannot allow reordering of reads
3717 from function arguments with stores to outgoing arguments
3718 of sibling calls. */
3719 set_mem_alias_set (dest, 0);
3721 emit_move_insn (dest, x);
3725 /* Generate code to push X onto the stack, assuming it has mode MODE and type TYPE.
3727 MODE is redundant except when X is a CONST_INT (since they don't carry mode info).
3729 SIZE is an rtx for the size of data to be copied (in bytes),
3730 needed only if X is BLKmode.
3732 ALIGN (in bits) is maximum alignment we can assume.
3734 If PARTIAL and REG are both nonzero, then copy that many of the first
3735 words of X into registers starting with REG, and push the rest of X.
3736 The amount of space pushed is decreased by PARTIAL words,
3737 rounded *down* to a multiple of PARM_BOUNDARY.
3738 REG must be a hard register in this case.
3739 If REG is zero but PARTIAL is not, take all other actions for an
3740 argument partially in registers, but do not actually load any registers.
3743 EXTRA is the amount in bytes of extra space to leave next to this arg.
3744 This is ignored if an argument block has already been allocated.
3746 On a machine that lacks real push insns, ARGS_ADDR is the address of
3747 the bottom of the argument block for this call. We use indexing off there
3748 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3749 argument block has not been preallocated.
3751 ARGS_SO_FAR is the size of args previously pushed for this call.
3753 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3754 for arguments passed in registers. If nonzero, it will be the number
3755 of bytes required. */
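/* Illustrative sketch, not part of the original source: pushing a scalar
   SImode value VAL with no partial-register split, on a machine with
   real push insns (ARGS_ADDR == 0):

     emit_push_insn (val, SImode, NULL_TREE, NULL_RTX,
                     GET_MODE_ALIGNMENT (SImode), 0, NULL_RTX, 0,
                     NULL_RTX, const0_rtx, 0, NULL_RTX);  */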
3758 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3759 args_addr, args_so_far, reg_parm_stack_space,
3762 enum machine_mode mode;
3771 int reg_parm_stack_space;
3775 enum direction stack_direction
3776 #ifdef STACK_GROWS_DOWNWARD
3782 /* Decide where to pad the argument: `downward' for below,
3783 `upward' for above, or `none' for don't pad it.
3784 Default is below for small data on big-endian machines; else above. */
3785 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3787 /* Invert direction if stack is post-decrement. FIXME: why? */
3789 if (STACK_PUSH_CODE == POST_DEC)
3790 if (where_pad != none)
3791 where_pad = (where_pad == downward ? upward : downward);
3793 xinner = x = protect_from_queue (x, 0);
3795 if (mode == BLKmode)
3797 /* Copy a block into the stack, entirely or partially. */
3800 int used = partial * UNITS_PER_WORD;
3801 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3809 /* USED is now the # of bytes we need not copy to the stack
3810 because registers will take care of them. */
3813 xinner = adjust_address (xinner, BLKmode, used);
3815 /* If the partial register-part of the arg counts in its stack size,
3816 skip the part of stack space corresponding to the registers.
3817 Otherwise, start copying to the beginning of the stack space,
3818 by setting SKIP to 0. */
3819 skip = (reg_parm_stack_space == 0) ? 0 : used;
3821 #ifdef PUSH_ROUNDING
3822 /* Do it with several push insns if that doesn't take lots of insns
3823 and if there is no difficulty with push insns that skip bytes
3824 on the stack for alignment purposes. */
3827 && GET_CODE (size) == CONST_INT
3829 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3830 /* Here we avoid the case of a structure whose weak alignment
3831 forces many pushes of a small amount of data,
3832 and such small pushes do rounding that causes trouble. */
3833 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3834 || align >= BIGGEST_ALIGNMENT
3835 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3836 == (align / BITS_PER_UNIT)))
3837 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3839 /* Push padding now if padding above and stack grows down,
3840 or if padding below and stack grows up.
3841 But if space already allocated, this has already been done. */
3842 if (extra && args_addr == 0
3843 && where_pad != none && where_pad != stack_direction)
3844 anti_adjust_stack (GEN_INT (extra));
3846 move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
3849 #endif /* PUSH_ROUNDING */
3853 /* Otherwise make space on the stack and copy the data
3854 to the address of that space. */
3856 /* Deduct words put into registers from the size we must copy. */
3859 if (GET_CODE (size) == CONST_INT)
3860 size = GEN_INT (INTVAL (size) - used);
3862 size = expand_binop (GET_MODE (size), sub_optab, size,
3863 GEN_INT (used), NULL_RTX, 0,
3867 /* Get the address of the stack space.
3868 In this case, we do not deal with EXTRA separately.
3869 A single stack adjust will do. */
3872 temp = push_block (size, extra, where_pad == downward);
3875 else if (GET_CODE (args_so_far) == CONST_INT)
3876 temp = memory_address (BLKmode,
3877 plus_constant (args_addr,
3878 skip + INTVAL (args_so_far)));
3880 temp = memory_address (BLKmode,
3881 plus_constant (gen_rtx_PLUS (Pmode,
3886 if (!ACCUMULATE_OUTGOING_ARGS)
3888 /* If the source is referenced relative to the stack pointer,
3889 copy it to another register to stabilize it. We do not need
3890 to do this if we know that we won't be changing sp. */
3892 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3893 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3894 temp = copy_to_reg (temp);
3897 target = gen_rtx_MEM (BLKmode, temp);
3901 set_mem_attributes (target, type, 1);
3902 /* Function incoming arguments may overlap with sibling call
3903 outgoing arguments and we cannot allow reordering of reads
3904 from function arguments with stores to outgoing arguments
3905 of sibling calls. */
3906 set_mem_alias_set (target, 0);
3909 /* ALIGN may well be better aligned than TYPE, e.g. due to
3910 PARM_BOUNDARY. Assume the caller isn't lying. */
3911 set_mem_align (target, align);
3913 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3916 else if (partial > 0)
3918 /* Scalar partly in registers. */
3920 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3923 /* # words of start of argument
3924 that we must make space for but need not store. */
3925 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3926 int args_offset = INTVAL (args_so_far);
3929 /* Push padding now if padding above and stack grows down,
3930 or if padding below and stack grows up.
3931 But if space already allocated, this has already been done. */
3932 if (extra && args_addr == 0
3933 && where_pad != none && where_pad != stack_direction)
3934 anti_adjust_stack (GEN_INT (extra));
3936 /* If we make space by pushing it, we might as well push
3937 the real data. Otherwise, we can leave OFFSET nonzero
3938 and leave the space uninitialized. */
3942 /* Now NOT_STACK gets the number of words that we don't need to
3943 allocate on the stack. */
3944 not_stack = partial - offset;
3946 /* If the partial register-part of the arg counts in its stack size,
3947 skip the part of stack space corresponding to the registers.
3948 Otherwise, start copying to the beginning of the stack space,
3949 by setting SKIP to 0. */
3950 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3952 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3953 x = validize_mem (force_const_mem (mode, x));
3955 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3956 SUBREGs of such registers are not allowed. */
3957 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3958 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3959 x = copy_to_reg (x);
3961 /* Loop over all the words allocated on the stack for this arg. */
3962 /* We can do it by words, because any scalar bigger than a word
3963 has a size that is a multiple of a word. */
3964 #ifndef PUSH_ARGS_REVERSED
3965 for (i = not_stack; i < size; i++)
3967 for (i = size - 1; i >= not_stack; i--)
3969 if (i >= not_stack + offset)
3970 emit_push_insn (operand_subword_force (x, i, mode),
3971 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3973 GEN_INT (args_offset + ((i - not_stack + skip)
3975 reg_parm_stack_space, alignment_pad);
3982 /* Push padding now if padding above and stack grows down,
3983 or if padding below and stack grows up.
3984 But if space already allocated, this has already been done. */
3985 if (extra && args_addr == 0
3986 && where_pad != none && where_pad != stack_direction)
3987 anti_adjust_stack (GEN_INT (extra));
3989 #ifdef PUSH_ROUNDING
3990 if (args_addr == 0 && PUSH_ARGS)
3991 emit_single_push_insn (mode, x, type);
3995 if (GET_CODE (args_so_far) == CONST_INT)
3997 = memory_address (mode,
3998 plus_constant (args_addr,
3999 INTVAL (args_so_far)));
4001 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
4003 dest = gen_rtx_MEM (mode, addr);
4006 set_mem_attributes (dest, type, 1);
4007 /* Function incoming arguments may overlap with sibling call
4008 outgoing arguments and we cannot allow reordering of reads
4009 from function arguments with stores to outgoing arguments
4010 of sibling calls. */
4011 set_mem_alias_set (dest, 0);
4014 emit_move_insn (dest, x);
4018 /* If part should go in registers, copy that part
4019 into the appropriate registers. Do this now, at the end,
4020 since mem-to-mem copies above may do function calls. */
4021 if (partial > 0 && reg != 0)
4023 /* Handle calls that pass values in multiple non-contiguous locations.
4024 The Irix 6 ABI has examples of this. */
4025 if (GET_CODE (reg) == PARALLEL)
4026 emit_group_load (reg, x, -1); /* ??? size? */
4028 move_block_to_reg (REGNO (reg), x, partial, mode);
4031 if (extra && args_addr == 0 && where_pad == stack_direction)
4032 anti_adjust_stack (GEN_INT (extra));
4034 if (alignment_pad && args_addr == 0)
4035 anti_adjust_stack (alignment_pad);
4038 /* Return X if X can be used as a subtarget in a sequence of arithmetic operations. */
4046 /* Only registers can be subtargets. */
4047 || GET_CODE (x) != REG
4048 /* If the register is readonly, it can't be set more than once. */
4049 || RTX_UNCHANGING_P (x)
4050 /* Don't use hard regs to avoid extending their life. */
4051 || REGNO (x) < FIRST_PSEUDO_REGISTER
4052 /* Avoid subtargets inside loops,
4053 since they hide some invariant expressions. */
4054 || preserve_subexpressions_p ())
4058 /* Expand an assignment that stores the value of FROM into TO.
4059 If WANT_VALUE is nonzero, return an rtx for the value of TO.
4060 (This may contain a QUEUED rtx;
4061 if the value is constant, this rtx is a constant.)
4062 Otherwise, the returned value is NULL_RTX.
4064 SUGGEST_REG is no longer actually used.
4065 It used to mean: copy the value through a register
4066 and return that register, if that is possible.
4067 We now use WANT_VALUE to decide whether to do this. */
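/* Illustrative sketch, not part of the original source: a front end
   expanding the statement `x = y + 1' (X_TREE and Y_TREE are
   hypothetical decls) would do roughly

     expand_assignment (x_tree,
                        build (PLUS_EXPR, type, y_tree, integer_one_node),
                        0, 0);

   with WANT_VALUE zero because the statement's value is unused.  */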
4070 expand_assignment (to, from, want_value, suggest_reg)
4073 int suggest_reg ATTRIBUTE_UNUSED;
4078 /* Don't crash if the lhs of the assignment was erroneous. */
4080 if (TREE_CODE (to) == ERROR_MARK)
4082 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
4083 return want_value ? result : NULL_RTX;
4086 /* Assignment of a structure component needs special treatment
4087 if the structure component's rtx is not simply a MEM.
4088 Assignment of an array element at a constant index, and assignment of
4089 an array element in an unaligned packed structure field, has the same treatment. */
4092 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
4093 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
4094 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4096 enum machine_mode mode1;
4097 HOST_WIDE_INT bitsize, bitpos;
4105 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4106 &unsignedp, &volatilep);
4108 /* If we are going to use store_bit_field and extract_bit_field,
4109 make sure to_rtx will be safe for multiple use. */
4111 if (mode1 == VOIDmode && want_value)
4112 tem = stabilize_reference (tem);
4114 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
4118 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4120 if (GET_CODE (to_rtx) != MEM)
4123 #ifdef POINTERS_EXTEND_UNSIGNED
4124 if (GET_MODE (offset_rtx) != Pmode)
4125 offset_rtx = convert_memory_address (Pmode, offset_rtx);
4127 if (GET_MODE (offset_rtx) != ptr_mode)
4128 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4131 /* A constant address in TO_RTX can have VOIDmode; we must not try
4132 to call force_reg in that case. Avoid that case. */
4133 if (GET_CODE (to_rtx) == MEM
4134 && GET_MODE (to_rtx) == BLKmode
4135 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4137 && (bitpos % bitsize) == 0
4138 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4139 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4141 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4145 to_rtx = offset_address (to_rtx, offset_rtx,
4146 highest_pow2_factor_for_type (TREE_TYPE (to),
4150 if (GET_CODE (to_rtx) == MEM)
4152 /* If the field is at offset zero, we could have been given the
4153 DECL_RTX of the parent struct. Don't munge it. */
4154 to_rtx = shallow_copy_rtx (to_rtx);
4156 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4159 /* Deal with volatile and readonly fields. The former is only done
4160 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4161 if (volatilep && GET_CODE (to_rtx) == MEM)
4163 if (to_rtx == orig_to_rtx)
4164 to_rtx = copy_rtx (to_rtx);
4165 MEM_VOLATILE_P (to_rtx) = 1;
4168 if (TREE_CODE (to) == COMPONENT_REF
4169 && TREE_READONLY (TREE_OPERAND (to, 1)))
4171 if (to_rtx == orig_to_rtx)
4172 to_rtx = copy_rtx (to_rtx);
4173 RTX_UNCHANGING_P (to_rtx) = 1;
4176 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
4178 if (to_rtx == orig_to_rtx)
4179 to_rtx = copy_rtx (to_rtx);
4180 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4183 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4185 /* Spurious cast for HPUX compiler. */
4186 ? ((enum machine_mode)
4187 TYPE_MODE (TREE_TYPE (to)))
4189 unsignedp, TREE_TYPE (tem), get_alias_set (to));
4191 preserve_temp_slots (result);
4195 /* If the value is meaningful, convert RESULT to the proper mode.
4196 Otherwise, return nothing. */
4197 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
4198 TYPE_MODE (TREE_TYPE (from)),
4200 TREE_UNSIGNED (TREE_TYPE (to)))
4204 /* If the rhs is a function call and its value is not an aggregate,
4205 call the function before we start to compute the lhs.
4206 This is needed for correct code for cases such as
4207 val = setjmp (buf) on machines where reference to val
4208 requires loading up part of an address in a separate insn.
4210 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4211 since it might be a promoted variable where the zero- or sign- extension
4212 needs to be done. Handling this in the normal way is safe because no
4213 computation is done before the call. */
4214 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
4215 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4216 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4217 && GET_CODE (DECL_RTL (to)) == REG))
4222 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
4224 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4226 /* Handle calls that return values in multiple non-contiguous locations.
4227 The Irix 6 ABI has examples of this. */
4228 if (GET_CODE (to_rtx) == PARALLEL)
4229 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)));
4230 else if (GET_MODE (to_rtx) == BLKmode)
4231 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4234 #ifdef POINTERS_EXTEND_UNSIGNED
4235 if (POINTER_TYPE_P (TREE_TYPE (to))
4236 && GET_MODE (to_rtx) != GET_MODE (value))
4237 value = convert_memory_address (GET_MODE (to_rtx), value);
4239 emit_move_insn (to_rtx, value);
4241 preserve_temp_slots (to_rtx);
4244 return want_value ? to_rtx : NULL_RTX;
4247 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4248 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4251 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4253 /* Don't move directly into a return register. */
4254 if (TREE_CODE (to) == RESULT_DECL
4255 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
4260 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
4262 if (GET_CODE (to_rtx) == PARALLEL)
4263 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)));
4265 emit_move_insn (to_rtx, temp);
4267 preserve_temp_slots (to_rtx);
4270 return want_value ? to_rtx : NULL_RTX;
4273 /* In case we are returning the contents of an object which overlaps
4274 the place the value is being stored, use a safe function when copying
4275 a value through a pointer into a structure value return block. */
4276 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4277 && current_function_returns_struct
4278 && !current_function_returns_pcc_struct)
4283 size = expr_size (from);
4284 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
4286 if (TARGET_MEM_FUNCTIONS)
4287 emit_library_call (memmove_libfunc, LCT_NORMAL,
4288 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4289 XEXP (from_rtx, 0), Pmode,
4290 convert_to_mode (TYPE_MODE (sizetype),
4291 size, TREE_UNSIGNED (sizetype)),
4292 TYPE_MODE (sizetype));
4294 emit_library_call (bcopy_libfunc, LCT_NORMAL,
4295 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
4296 XEXP (to_rtx, 0), Pmode,
4297 convert_to_mode (TYPE_MODE (integer_type_node),
4299 TREE_UNSIGNED (integer_type_node)),
4300 TYPE_MODE (integer_type_node));
4302 preserve_temp_slots (to_rtx);
4305 return want_value ? to_rtx : NULL_RTX;
4308 /* Compute FROM and store the value in the rtx we got. */
4311 result = store_expr (from, to_rtx, want_value);
4312 preserve_temp_slots (result);
4315 return want_value ? result : NULL_RTX;
4318 /* Generate code for computing expression EXP,
4319 and storing the value into TARGET.
4320 TARGET may contain a QUEUED rtx.
4322 If WANT_VALUE & 1 is nonzero, return a copy of the value
4323 not in TARGET, so that we can be sure to use the proper
4324 value in a containing expression even if TARGET has something
4325 else stored in it. If possible, we copy the value through a pseudo
4326 and return that pseudo. Or, if the value is constant, we try to
4327 return the constant. In some cases, we return a pseudo
4328 copied *from* TARGET.
4330 If the mode is BLKmode then we may return TARGET itself.
4331 It turns out that in BLKmode it doesn't cause a problem,
4332 because C has no operators that could combine two different
4333 assignments into the same BLKmode object with different values
4334 with no sequence point. Will other languages need this to be true?
4337 If WANT_VALUE & 1 is 0, we return NULL, to make sure
4338 to catch quickly any cases where the caller uses the value
4339 and fails to set WANT_VALUE.
4341 If WANT_VALUE & 2 is set, this is a store into a call param on the
4342 stack, and block moves may need to be treated specially. */
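/* Illustrative sketch, not part of the original source: storing EXP into
   an already-expanded destination while keeping a usable copy of the
   value,

     rtx val = store_expr (exp, to_rtx, 1);

   with WANT_VALUE & 1 set, VAL remains usable even if TO_RTX is later
   overwritten; passing 0 returns NULL_RTX instead.  */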
4345 store_expr (exp, target, want_value)
4351 int dont_return_target = 0;
4352 int dont_store_target = 0;
4354 if (VOID_TYPE_P (TREE_TYPE (exp)))
4356 /* C++ can generate ?: expressions with a throw expression in one
4357 branch and an rvalue in the other. Here, we resolve attempts to
4358 store the throw expression's nonexistent result. */
4361 expand_expr (exp, const0_rtx, VOIDmode, 0);
4364 if (TREE_CODE (exp) == COMPOUND_EXPR)
4366 /* Perform first part of compound expression, then assign from second part. */
4368 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4369 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4371 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4373 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4375 /* For conditional expression, get safe form of the target. Then
4376 test the condition, doing the appropriate assignment on either
4377 side. This avoids the creation of unnecessary temporaries.
4378 For non-BLKmode, it is more efficient not to do this. */
4380 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4383 target = protect_from_queue (target, 1);
4385 do_pending_stack_adjust ();
4387 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4388 start_cleanup_deferral ();
4389 store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
4390 end_cleanup_deferral ();
4392 emit_jump_insn (gen_jump (lab2));
4395 start_cleanup_deferral ();
4396 store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
4397 end_cleanup_deferral ();
4402 return want_value & 1 ? target : NULL_RTX;
4404 else if (queued_subexp_p (target))
4405 /* If target contains a postincrement, let's not risk
4406 using it as the place to generate the rhs. */
4408 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4410 /* Expand EXP into a new pseudo. */
4411 temp = gen_reg_rtx (GET_MODE (target));
4412 temp = expand_expr (exp, temp, GET_MODE (target),
4414 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4417 temp = expand_expr (exp, NULL_RTX, GET_MODE (target),
4419 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4421 /* If target is volatile, ANSI requires accessing the value
4422 *from* the target, if it is accessed. So make that happen.
4423 In no case return the target itself. */
4424 if (! MEM_VOLATILE_P (target) && (want_value & 1) != 0)
4425 dont_return_target = 1;
4427 else if ((want_value & 1) != 0
4428 && GET_CODE (target) == MEM
4429 && ! MEM_VOLATILE_P (target)
4430 && GET_MODE (target) != BLKmode)
4431 /* If target is in memory and caller wants value in a register instead,
4432 arrange that. Pass TARGET as target for expand_expr so that,
4433 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4434 We know expand_expr will not use the target in that case.
4435 Don't do this if TARGET is volatile because we are supposed
4436 to write it and then read it. */
4438 temp = expand_expr (exp, target, GET_MODE (target),
4439 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4440 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4442 /* If TEMP is already in the desired TARGET, only copy it from
4443 memory and don't store it there again. */
4445 || (rtx_equal_p (temp, target)
4446 && ! side_effects_p (temp) && ! side_effects_p (target)))
4447 dont_store_target = 1;
4448 temp = copy_to_reg (temp);
4450 dont_return_target = 1;
4452 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4453 /* If this is a scalar in a register that is stored in a wider mode
4454 than the declared mode, compute the result into its declared mode
4455 and then convert to the wider mode. Our value is the computed expression. */
4458 rtx inner_target = 0;
4460 /* If we don't want a value, we can do the conversion inside EXP,
4461 which will often result in some optimizations. Do the conversion
4462 in two steps: first change the signedness, if needed, then
4463 the extend. But don't do this if the type of EXP is a subtype
4464 of something else since then the conversion might involve
4465 more than just converting modes. */
4466 if ((want_value & 1) == 0
4467 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4468 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4470 if (TREE_UNSIGNED (TREE_TYPE (exp))
4471 != SUBREG_PROMOTED_UNSIGNED_P (target))
4473 ((*lang_hooks.types.signed_or_unsigned_type)
4474 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4476 exp = convert ((*lang_hooks.types.type_for_mode)
4477 (GET_MODE (SUBREG_REG (target)),
4478 SUBREG_PROMOTED_UNSIGNED_P (target)),
4481 inner_target = SUBREG_REG (target);
4484 temp = expand_expr (exp, inner_target, VOIDmode,
4485 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4487 /* If TEMP is a MEM and we want a result value, make the access
4488 now so it gets done only once. Strictly speaking, this is
4489 only necessary if the MEM is volatile, or if the address
4490 overlaps TARGET. But not performing the load twice also
4491 reduces the amount of rtl we generate and then have to CSE. */
4492 if (GET_CODE (temp) == MEM && (want_value & 1) != 0)
4493 temp = copy_to_reg (temp);
4495 /* If TEMP is a VOIDmode constant, use convert_modes to make
4496 sure that we properly convert it. */
4497 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4499 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4500 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4501 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4502 GET_MODE (target), temp,
4503 SUBREG_PROMOTED_UNSIGNED_P (target));
4506 convert_move (SUBREG_REG (target), temp,
4507 SUBREG_PROMOTED_UNSIGNED_P (target));
4509 /* If we promoted a constant, change the mode back down to match
4510 target. Otherwise, the caller might get confused by a result whose
4511 mode is larger than expected. */
4513 if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
4515 if (GET_MODE (temp) != VOIDmode)
4517 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4518 SUBREG_PROMOTED_VAR_P (temp) = 1;
4519 SUBREG_PROMOTED_UNSIGNED_SET (temp,
4520 SUBREG_PROMOTED_UNSIGNED_P (target));
4523 temp = convert_modes (GET_MODE (target),
4524 GET_MODE (SUBREG_REG (target)),
4525 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4528 return want_value & 1 ? temp : NULL_RTX;
4532 temp = expand_expr (exp, target, GET_MODE (target),
4533 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4534 /* Return TARGET if it's a specified hardware register.
4535 If TARGET is a volatile mem ref, either return TARGET
4536 or return a reg copied *from* TARGET; ANSI requires this.
4538 Otherwise, if TEMP is not TARGET, return TEMP
4539 if it is constant (for efficiency),
4540 or if we really want the correct value. */
4541 if (!(target && GET_CODE (target) == REG
4542 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4543 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4544 && ! rtx_equal_p (temp, target)
4545 && (CONSTANT_P (temp) || (want_value & 1) != 0))
4546 dont_return_target = 1;
4549 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4550 the same as that of TARGET, adjust the constant. This is needed, for
4551 example, in case it is a CONST_DOUBLE and we want only a word-sized value. */
4553 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4554 && TREE_CODE (exp) != ERROR_MARK
4555 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4556 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4557 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4559 /* If value was not generated in the target, store it there.
4560 Convert the value to TARGET's type first if necessary.
4561 If TEMP and TARGET compare equal according to rtx_equal_p, but
4562 one or both of them are volatile memory refs, we have to distinguish two cases:
4564 - expand_expr has used TARGET. In this case, we must not generate
4565 another copy. This can be detected by TARGET being equal according to ==.
4567 - expand_expr has not used TARGET - that means that the source just
4568 happens to have the same RTX form. Since temp will have been created
4569 by expand_expr, it will compare unequal according to == .
4570 We must generate a copy in this case, to reach the correct number
4571 of volatile memory references. */
4573 if ((! rtx_equal_p (temp, target)
4574 || (temp != target && (side_effects_p (temp)
4575 || side_effects_p (target))))
4576 && TREE_CODE (exp) != ERROR_MARK
4577 && ! dont_store_target
4578 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4579 but TARGET is not a valid memory reference, TEMP will differ
4580 from TARGET although it is really the same location. */
4581 && (TREE_CODE_CLASS (TREE_CODE (exp)) != 'd'
4582 || target != DECL_RTL_IF_SET (exp))
4583 /* If there's nothing to copy, don't bother. Don't call expr_size
4584 unless necessary, because some front ends' (C++) expr_size hook
4585 aborts on objects that are not supposed to be bit-copied or bit-initialized. */
4587 && expr_size (exp) != const0_rtx)
4589 target = protect_from_queue (target, 1);
4590 if (GET_MODE (temp) != GET_MODE (target)
4591 && GET_MODE (temp) != VOIDmode)
4593 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4594 if (dont_return_target)
4596 /* In this case, we will return TEMP,
4597 so make sure it has the proper mode.
4598 But don't forget to store the value into TARGET. */
4599 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4600 emit_move_insn (target, temp);
4603 convert_move (target, temp, unsignedp);
4606 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4608 /* Handle copying a string constant into an array. The string
4609 constant may be shorter than the array. So copy just the string's
4610 actual length, and clear the rest. First get the size of the data
4611 type of the string, which is actually the size of the target. */
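/* For example (illustrative):

       char buf[8] = "hi";

   copies the 3 bytes of the string constant (including its terminating
   NUL) into BUF and clears the remaining 5 bytes.  */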
4612 rtx size = expr_size (exp);
4614 if (GET_CODE (size) == CONST_INT
4615 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4616 emit_block_move (target, temp, size,
4618 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4621 /* Compute the size of the data to copy from the string. */
4623 = size_binop (MIN_EXPR,
4624 make_tree (sizetype, size),
4625 size_int (TREE_STRING_LENGTH (exp)));
4627 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4629 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4632 /* Copy that much. */
4633 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx, 0);
4634 emit_block_move (target, temp, copy_size_rtx,
4636 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4638 /* Figure out how much is left in TARGET that we have to clear.
4639 Do all calculations in ptr_mode. */
4640 if (GET_CODE (copy_size_rtx) == CONST_INT)
4642 size = plus_constant (size, -INTVAL (copy_size_rtx));
4643 target = adjust_address (target, BLKmode,
4644 INTVAL (copy_size_rtx));
4648 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4649 copy_size_rtx, NULL_RTX, 0,
4652 #ifdef POINTERS_EXTEND_UNSIGNED
4653 if (GET_MODE (copy_size_rtx) != Pmode)
4654 copy_size_rtx = convert_memory_address (Pmode,
4658 target = offset_address (target, copy_size_rtx,
4659 highest_pow2_factor (copy_size));
4660 label = gen_label_rtx ();
4661 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4662 GET_MODE (size), 0, label);
4665 if (size != const0_rtx)
4666 clear_storage (target, size);
4672 /* Handle calls that return values in multiple non-contiguous locations.
4673 The Irix 6 ABI has examples of this. */
4674 else if (GET_CODE (target) == PARALLEL)
4675 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)));
4676 else if (GET_MODE (temp) == BLKmode)
4677 emit_block_move (target, temp, expr_size (exp),
4679 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4681 emit_move_insn (target, temp);
4684 /* If we don't want a value, return NULL_RTX. */
4685 if ((want_value & 1) == 0)
4688 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4689 ??? The latter test doesn't seem to make sense. */
4690 else if (dont_return_target && GET_CODE (temp) != MEM)
4693 /* Return TARGET itself if it is a hard register. */
4694 else if ((want_value & 1) != 0
4695 && GET_MODE (target) != BLKmode
4696 && ! (GET_CODE (target) == REG
4697 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4698 return copy_to_reg (target);
4704 /* Return 1 if EXP just contains zeros. */
4712 switch (TREE_CODE (exp))
4716 case NON_LVALUE_EXPR:
4717 case VIEW_CONVERT_EXPR:
4718 return is_zeros_p (TREE_OPERAND (exp, 0));
4721 return integer_zerop (exp);
4725 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4728 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4731 for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4732 elt = TREE_CHAIN (elt))
4733 if (!is_zeros_p (TREE_VALUE (elt)))
4739 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4740 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4741 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4742 if (! is_zeros_p (TREE_VALUE (elt)))
4752 /* Return 1 if EXP contains mostly (at least 3/4) zeros. */
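/* For example (illustrative): the constructor in

       int a[8] = { 0, 0, 0, 0, 0, 0, 7, 0 };

   has 7 zero elements out of 8, and 4 * 7 >= 3 * 8, so it qualifies.  */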
4755 mostly_zeros_p (exp)
4758 if (TREE_CODE (exp) == CONSTRUCTOR)
4760 int elts = 0, zeros = 0;
4761 tree elt = CONSTRUCTOR_ELTS (exp);
4762 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4764 /* If there are no ranges of true bits, it is all zero. */
4765 return elt == NULL_TREE;
4767 for (; elt; elt = TREE_CHAIN (elt))
4769 /* We do not handle the case where the index is a RANGE_EXPR,
4770 so the statistic will be somewhat inaccurate.
4771 We do make a more accurate count in store_constructor itself,
4772 and since this function is only used for nested array elements,
4773 this should be close enough. */
4774 if (mostly_zeros_p (TREE_VALUE (elt)))
4779 return 4 * zeros >= 3 * elts;
4782 return is_zeros_p (exp);
4785 /* Helper function for store_constructor.
4786 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4787 TYPE is the type of the CONSTRUCTOR, not the element type.
4788 CLEARED is as for store_constructor.
4789 ALIAS_SET is the alias set to use for any stores.
4791 This provides a recursive shortcut back to store_constructor when it isn't
4792 necessary to go through store_field. This is so that we can pass through
4793 the cleared field to let store_constructor know that we may not have to
4794 clear a substructure if the outer structure has already been cleared. */
4797 store_constructor_field (target, bitsize, bitpos, mode, exp, type, cleared,
4800 unsigned HOST_WIDE_INT bitsize;
4801 HOST_WIDE_INT bitpos;
4802 enum machine_mode mode;
4807 if (TREE_CODE (exp) == CONSTRUCTOR
4808 && bitpos % BITS_PER_UNIT == 0
4809 /* If we have a nonzero bitpos for a register target, then we just
4810 let store_field do the bitfield handling. This is unlikely to
4811 generate unnecessary clear instructions anyway. */
4812 && (bitpos == 0 || GET_CODE (target) == MEM))
4814 if (GET_CODE (target) == MEM)
4816 = adjust_address (target,
4817 GET_MODE (target) == BLKmode
4819 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4820 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4823 /* Update the alias set, if required. */
4824 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4825 && MEM_ALIAS_SET (target) != 0)
4827 target = copy_rtx (target);
4828 set_mem_alias_set (target, alias_set);
4831 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4834 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4838 /* Store the value of constructor EXP into the rtx TARGET.
4839 TARGET is either a REG or a MEM; we know it cannot conflict, since
4840 safe_from_p has been called.
4841 CLEARED is true if TARGET is known to have been zeroed.
4842 SIZE is the number of bytes of TARGET we are allowed to modify: this
4843 may not be the same as the size of EXP if we are assigning to a field
4844 which has been packed to exclude padding bits. */
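/* Illustrative case (assumed, not from the original comment): for

       struct P { int x, y, z; };
       struct P p = { 1 };

   the constructor names fewer fields than the type has, so TARGET is
   cleared first and only the store of 1 into X is emitted explicitly.  */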
4847 store_constructor (exp, target, cleared, size)
4853 tree type = TREE_TYPE (exp);
4854 #ifdef WORD_REGISTER_OPERATIONS
4855 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4858 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4859 || TREE_CODE (type) == QUAL_UNION_TYPE)
4863 /* We either clear the aggregate or indicate the value is dead. */
4864 if ((TREE_CODE (type) == UNION_TYPE
4865 || TREE_CODE (type) == QUAL_UNION_TYPE)
4867 && ! CONSTRUCTOR_ELTS (exp))
4868 /* If the constructor is empty, clear the union. */
4870 clear_storage (target, expr_size (exp));
4874 /* If we are building a static constructor into a register,
4875 set the initial value as zero so we can fold the value into
4876 a constant. But if more than one register is involved,
4877 this probably loses. */
4878 else if (! cleared && GET_CODE (target) == REG && TREE_STATIC (exp)
4879 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4881 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4885 /* If the constructor has fewer fields than the structure
4886 or if we are initializing the structure to mostly zeros,
4887 clear the whole structure first. Don't do this if TARGET is a
4888 register whose mode size isn't equal to SIZE since clear_storage
4889 can't handle this case. */
4890 else if (! cleared && size > 0
4891 && ((list_length (CONSTRUCTOR_ELTS (exp))
4892 != fields_length (type))
4893 || mostly_zeros_p (exp))
4894 && (GET_CODE (target) != REG
4895 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4898 clear_storage (target, GEN_INT (size));
4903 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4905 /* Store each element of the constructor into
4906 the corresponding field of TARGET. */
4908 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4910 tree field = TREE_PURPOSE (elt);
4911 tree value = TREE_VALUE (elt);
4912 enum machine_mode mode;
4913 HOST_WIDE_INT bitsize;
4914 HOST_WIDE_INT bitpos = 0;
4916 rtx to_rtx = target;
4918 /* Just ignore missing fields.
4919 We cleared the whole structure, above,
4920 if any fields are missing. */
4924 if (cleared && is_zeros_p (value))
4927 if (host_integerp (DECL_SIZE (field), 1))
4928 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4932 mode = DECL_MODE (field);
4933 if (DECL_BIT_FIELD (field))
4936 offset = DECL_FIELD_OFFSET (field);
4937 if (host_integerp (offset, 0)
4938 && host_integerp (bit_position (field), 0))
4940 bitpos = int_bit_position (field);
4944 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4950 if (contains_placeholder_p (offset))
4951 offset = build (WITH_RECORD_EXPR, sizetype,
4952 offset, make_tree (TREE_TYPE (exp), target));
4954 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4955 if (GET_CODE (to_rtx) != MEM)
4958 #ifdef POINTERS_EXTEND_UNSIGNED
4959 if (GET_MODE (offset_rtx) != Pmode)
4960 offset_rtx = convert_memory_address (Pmode, offset_rtx);
4962 if (GET_MODE (offset_rtx) != ptr_mode)
4963 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4966 to_rtx = offset_address (to_rtx, offset_rtx,
4967 highest_pow2_factor (offset));
4970 if (TREE_READONLY (field))
4972 if (GET_CODE (to_rtx) == MEM)
4973 to_rtx = copy_rtx (to_rtx);
4975 RTX_UNCHANGING_P (to_rtx) = 1;
4978 #ifdef WORD_REGISTER_OPERATIONS
4979 /* If this initializes a field that is smaller than a word, at the
4980 start of a word, try to widen it to a full word.
4981 This special case allows us to output C++ member function
4982 initializations in a form that the optimizers can understand. */
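/* Hypothetical example: with 32-bit words, the 16-bit field in

       struct H { short tag; short rest; } h = { 1, 0 };

   starts its word, so the constant 1 can be widened to a full-word
   constant and stored with a single word move.  */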
4983 if (GET_CODE (target) == REG
4984 && bitsize < BITS_PER_WORD
4985 && bitpos % BITS_PER_WORD == 0
4986 && GET_MODE_CLASS (mode) == MODE_INT
4987 && TREE_CODE (value) == INTEGER_CST
4989 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4991 tree type = TREE_TYPE (value);
4993 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4995 type = (*lang_hooks.types.type_for_size)
4996 (BITS_PER_WORD, TREE_UNSIGNED (type));
4997 value = convert (type, value);
5000 if (BYTES_BIG_ENDIAN)
5002 = fold (build (LSHIFT_EXPR, type, value,
5003 build_int_2 (BITS_PER_WORD - bitsize, 0)));
5004 bitsize = BITS_PER_WORD;
5009 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5010 && DECL_NONADDRESSABLE_P (field))
5012 to_rtx = copy_rtx (to_rtx);
5013 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5016 store_constructor_field (to_rtx, bitsize, bitpos, mode,
5017 value, type, cleared,
5018 get_alias_set (TREE_TYPE (field)));
5021 else if (TREE_CODE (type) == ARRAY_TYPE
5022 || TREE_CODE (type) == VECTOR_TYPE)
5027 tree domain = TYPE_DOMAIN (type);
5028 tree elttype = TREE_TYPE (type);
5030 HOST_WIDE_INT minelt = 0;
5031 HOST_WIDE_INT maxelt = 0;
5033 /* Vectors are like arrays, but the domain is stored via an array type indirectly. */
5035 if (TREE_CODE (type) == VECTOR_TYPE)
5037 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
5038 the same field as TYPE_DOMAIN, we are not guaranteed that it always will. */
5040 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
5041 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
5044 const_bounds_p = (TYPE_MIN_VALUE (domain)
5045 && TYPE_MAX_VALUE (domain)
5046 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5047 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5049 /* If we have constant bounds for the range of the type, get them. */
5052 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5053 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5056 /* If the constructor has fewer elements than the array,
5057 clear the whole array first. Similarly if this is
5058 a static constructor of a non-BLKmode object. */
5059 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
5063 HOST_WIDE_INT count = 0, zero_count = 0;
5064 need_to_clear = ! const_bounds_p;
5066 /* This loop is a more accurate version of the loop in
5067 mostly_zeros_p (it handles RANGE_EXPR in an index).
5068 It is also needed to check for missing elements. */
5069 for (elt = CONSTRUCTOR_ELTS (exp);
5070 elt != NULL_TREE && ! need_to_clear;
5071 elt = TREE_CHAIN (elt))
5073 tree index = TREE_PURPOSE (elt);
5074 HOST_WIDE_INT this_node_count;
5076 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5078 tree lo_index = TREE_OPERAND (index, 0);
5079 tree hi_index = TREE_OPERAND (index, 1);
5081 if (! host_integerp (lo_index, 1)
5082 || ! host_integerp (hi_index, 1))
5088 this_node_count = (tree_low_cst (hi_index, 1)
5089 - tree_low_cst (lo_index, 1) + 1);
5092 this_node_count = 1;
5094 count += this_node_count;
5095 if (mostly_zeros_p (TREE_VALUE (elt)))
5096 zero_count += this_node_count;
5099 /* Clear the entire array first if there are any missing elements,
5100 or if the incidence of zero elements is >= 75%. */
5102 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
5106 if (need_to_clear && size > 0)
5111 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5113 clear_storage (target, GEN_INT (size));
5117 else if (REG_P (target))
5118 /* Inform later passes that the old value is dead. */
5119 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5121 /* Store each element of the constructor into
5122 the corresponding element of TARGET, determined
5123 by counting the elements. */
5124 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
5126 elt = TREE_CHAIN (elt), i++)
5128 enum machine_mode mode;
5129 HOST_WIDE_INT bitsize;
5130 HOST_WIDE_INT bitpos;
5132 tree value = TREE_VALUE (elt);
5133 tree index = TREE_PURPOSE (elt);
5134 rtx xtarget = target;
5136 if (cleared && is_zeros_p (value))
5139 unsignedp = TREE_UNSIGNED (elttype);
5140 mode = TYPE_MODE (elttype);
5141 if (mode == BLKmode)
5142 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5143 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5146 bitsize = GET_MODE_BITSIZE (mode);
5148 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5150 tree lo_index = TREE_OPERAND (index, 0);
5151 tree hi_index = TREE_OPERAND (index, 1);
5152 rtx index_r, pos_rtx, loop_end;
5153 struct nesting *loop;
5154 HOST_WIDE_INT lo, hi, count;
5157 /* If the range is constant and "small", unroll the loop. */
5159 && host_integerp (lo_index, 0)
5160 && host_integerp (hi_index, 0)
5161 && (lo = tree_low_cst (lo_index, 0),
5162 hi = tree_low_cst (hi_index, 0),
5163 count = hi - lo + 1,
5164 (GET_CODE (target) != MEM
5166 || (host_integerp (TYPE_SIZE (elttype), 1)
5167 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5170 lo -= minelt; hi -= minelt;
5171 for (; lo <= hi; lo++)
5173 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5175 if (GET_CODE (target) == MEM
5176 && !MEM_KEEP_ALIAS_SET_P (target)
5177 && TREE_CODE (type) == ARRAY_TYPE
5178 && TYPE_NONALIASED_COMPONENT (type))
5180 target = copy_rtx (target);
5181 MEM_KEEP_ALIAS_SET_P (target) = 1;
5184 store_constructor_field
5185 (target, bitsize, bitpos, mode, value, type, cleared,
5186 get_alias_set (elttype));
5191 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
5192 loop_end = gen_label_rtx ();
5194 unsignedp = TREE_UNSIGNED (domain);
5196 index = build_decl (VAR_DECL, NULL_TREE, domain);
5199 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5201 SET_DECL_RTL (index, index_r);
5202 if (TREE_CODE (value) == SAVE_EXPR
5203 && SAVE_EXPR_RTL (value) == 0)
5205 /* Make sure value gets expanded once before the loop. */
5207 expand_expr (value, const0_rtx, VOIDmode, 0);
5210 store_expr (lo_index, index_r, 0);
5211 loop = expand_start_loop (0);
5213 /* Assign value to element index. */
5215 = convert (ssizetype,
5216 fold (build (MINUS_EXPR, TREE_TYPE (index),
5217 index, TYPE_MIN_VALUE (domain))));
5218 position = size_binop (MULT_EXPR, position,
5220 TYPE_SIZE_UNIT (elttype)));
5222 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
5223 xtarget = offset_address (target, pos_rtx,
5224 highest_pow2_factor (position));
5225 xtarget = adjust_address (xtarget, mode, 0);
5226 if (TREE_CODE (value) == CONSTRUCTOR)
5227 store_constructor (value, xtarget, cleared,
5228 bitsize / BITS_PER_UNIT);
5230 store_expr (value, xtarget, 0);
5232 expand_exit_loop_if_false (loop,
5233 build (LT_EXPR, integer_type_node,
5236 expand_increment (build (PREINCREMENT_EXPR,
5238 index, integer_one_node), 0, 0);
5240 emit_label (loop_end);
5243 else if ((index != 0 && ! host_integerp (index, 0))
5244 || ! host_integerp (TYPE_SIZE (elttype), 1))
5249 index = ssize_int (1);
5252 index = convert (ssizetype,
5253 fold (build (MINUS_EXPR, index,
5254 TYPE_MIN_VALUE (domain))));
5256 position = size_binop (MULT_EXPR, index,
5258 TYPE_SIZE_UNIT (elttype)));
5259 xtarget = offset_address (target,
5260 expand_expr (position, 0, VOIDmode, 0),
5261 highest_pow2_factor (position));
5262 xtarget = adjust_address (xtarget, mode, 0);
5263 store_expr (value, xtarget, 0);
5268 bitpos = ((tree_low_cst (index, 0) - minelt)
5269 * tree_low_cst (TYPE_SIZE (elttype), 1));
5271 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5273 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
5274 && TREE_CODE (type) == ARRAY_TYPE
5275 && TYPE_NONALIASED_COMPONENT (type))
5277 target = copy_rtx (target);
5278 MEM_KEEP_ALIAS_SET_P (target) = 1;
5281 store_constructor_field (target, bitsize, bitpos, mode, value,
5282 type, cleared, get_alias_set (elttype));
5288 /* Set constructor assignments. */
5289 else if (TREE_CODE (type) == SET_TYPE)
5291 tree elt = CONSTRUCTOR_ELTS (exp);
5292 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
5293 tree domain = TYPE_DOMAIN (type);
5294 tree domain_min, domain_max, bitlength;
5296 /* The default implementation strategy is to extract the constant
5297 parts of the constructor, use that to initialize the target,
5298 and then "or" in whatever non-constant ranges we need in addition.
5300 If a large set is all zero or all ones, it is
5301 probably better to set it using memset (if available) or bzero.
5302 Also, if a large set has just a single range, it may also be
5303 better to first clear the set (using bzero/memset) and then
5304 set the bits we want. */
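/* Rough sketch of that strategy (illustrative only): for a set
   constructor with a constant part and one variable range [lo..hi],
   the code emitted behaves roughly like

       set = constant_bits;
       __setbits (&set, nbits, lo, hi);

   using the run-time helper named below.  */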
5306 /* Check for all zeros. */
5307 if (elt == NULL_TREE && size > 0)
5310 clear_storage (target, GEN_INT (size));
5314 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
5315 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
5316 bitlength = size_binop (PLUS_EXPR,
5317 size_diffop (domain_max, domain_min),
5320 nbits = tree_low_cst (bitlength, 1);
5322 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
5323 are "complicated" (more than one range), initialize (the
5324 constant parts) by copying from a constant. */
5325 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
5326 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
5328 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
5329 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
5330 char *bit_buffer = (char *) alloca (nbits);
5331 HOST_WIDE_INT word = 0;
5332 unsigned int bit_pos = 0;
5333 unsigned int ibit = 0;
5334 unsigned int offset = 0; /* In bytes from beginning of set. */
5336 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
5339 if (bit_buffer[ibit])
5341 if (BYTES_BIG_ENDIAN)
5342 word |= (1 << (set_word_size - 1 - bit_pos));
5344 word |= 1 << bit_pos;
5348 if (bit_pos >= set_word_size || ibit == nbits)
5350 if (word != 0 || ! cleared)
5352 rtx datum = GEN_INT (word);
5355 /* The assumption here is that it is safe to use
5356 XEXP if the set is multi-word, but not if
5357 it's single-word. */
5358 if (GET_CODE (target) == MEM)
5359 to_rtx = adjust_address (target, mode, offset);
5360 else if (offset == 0)
5364 emit_move_insn (to_rtx, datum);
5371 offset += set_word_size / BITS_PER_UNIT;
5376 /* Don't bother clearing storage if the set is all ones. */
5377 if (TREE_CHAIN (elt) != NULL_TREE
5378 || (TREE_PURPOSE (elt) == NULL_TREE
5380 : ( ! host_integerp (TREE_VALUE (elt), 0)
5381 || ! host_integerp (TREE_PURPOSE (elt), 0)
5382 || (tree_low_cst (TREE_VALUE (elt), 0)
5383 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5384 != (HOST_WIDE_INT) nbits))))
5385 clear_storage (target, expr_size (exp));
5387 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5389 /* Start of range of element or NULL. */
5390 tree startbit = TREE_PURPOSE (elt);
5391 /* End of range of element, or element value. */
5392 tree endbit = TREE_VALUE (elt);
5393 HOST_WIDE_INT startb, endb;
5394 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5396 bitlength_rtx = expand_expr (bitlength,
5397 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5399 /* Handle non-range tuple element like [ expr ]. */
5400 if (startbit == NULL_TREE)
5402 startbit = save_expr (endbit);
5406 startbit = convert (sizetype, startbit);
5407 endbit = convert (sizetype, endbit);
5408 if (! integer_zerop (domain_min))
5410 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5411 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5413 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5414 EXPAND_CONST_ADDRESS);
5415 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5416 EXPAND_CONST_ADDRESS);
5422 ((build_qualified_type ((*lang_hooks.types.type_for_mode)
5423 (GET_MODE (target), 0),
5426 emit_move_insn (targetx, target);
5429 else if (GET_CODE (target) == MEM)
5434 /* Optimization: If startbit and endbit are constants divisible
5435 by BITS_PER_UNIT, call memset instead. */
5436 if (TARGET_MEM_FUNCTIONS
5437 && TREE_CODE (startbit) == INTEGER_CST
5438 && TREE_CODE (endbit) == INTEGER_CST
5439 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5440 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5442 emit_library_call (memset_libfunc, LCT_NORMAL,
5444 plus_constant (XEXP (targetx, 0),
5445 startb / BITS_PER_UNIT),
5447 constm1_rtx, TYPE_MODE (integer_type_node),
5448 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5449 TYPE_MODE (sizetype));
5452 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
5453 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
5454 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5455 startbit_rtx, TYPE_MODE (sizetype),
5456 endbit_rtx, TYPE_MODE (sizetype));
5459 emit_move_insn (target, targetx);
5467 /* Store the value of EXP (an expression tree)
5468 into a subfield of TARGET which has mode MODE and occupies
5469 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5470 If MODE is VOIDmode, it means that we are storing into a bit-field.
5472 If VALUE_MODE is VOIDmode, return nothing in particular.
5473 UNSIGNEDP is not used in this case.
5475 Otherwise, return an rtx for the value stored. This rtx
5476 has mode VALUE_MODE if that is convenient to do.
5477 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5479 TYPE is the type of the underlying object.
5481 ALIAS_SET is the alias set for the destination. This value will
5482 (in general) be different from that for TARGET, since TARGET is a
5483 reference to the containing structure. */
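/* For example (illustrative): the store in

       struct B { unsigned f : 3; } b;
       void g (void) { b.f = 5; }

   reaches here with BITSIZE 3, BITPOS 0 and MODE VOIDmode, so the
   value is inserted with bit-field techniques.  */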
5486 store_field (target, bitsize, bitpos, mode, exp, value_mode, unsignedp, type,
5489 HOST_WIDE_INT bitsize;
5490 HOST_WIDE_INT bitpos;
5491 enum machine_mode mode;
5493 enum machine_mode value_mode;
5498 HOST_WIDE_INT width_mask = 0;
5500 if (TREE_CODE (exp) == ERROR_MARK)
5503 /* If we have nothing to store, do nothing unless the expression has side effects. */
5506 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5507 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5508 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5510 /* If we are storing into an unaligned field of an aligned union that is
5511 in a register, we may have the mode of TARGET being an integer mode but
5512 MODE == BLKmode. In that case, get an aligned object whose size and
5513 alignment are the same as TARGET and store TARGET into it (we can avoid
5514 the store if the field being stored is the entire width of TARGET). Then
5515 call ourselves recursively to store the field into a BLKmode version of
5516 that object. Finally, load from the object into TARGET. This is not
5517 very efficient in general, but should only be slightly more expensive
5518 than the otherwise-required unaligned accesses. Perhaps this can be
5519 cleaned up later. */
5522 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5526 (build_qualified_type (type, TYPE_QUALS (type) | TYPE_QUAL_CONST),
5528 rtx blk_object = adjust_address (object, BLKmode, 0);
5530 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5531 emit_move_insn (object, target);
5533 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5536 emit_move_insn (target, object);
5538 /* We want to return the BLKmode version of the data. */
5542 if (GET_CODE (target) == CONCAT)
5544 /* We're storing into a struct containing a single __complex. */
5548 return store_expr (exp, target, 0);
5551 /* If the structure is in a register or if the component
5552 is a bit field, we cannot use addressing to access it.
5553 Use bit-field techniques or SUBREG to store in it. */
5555 if (mode == VOIDmode
5556 || (mode != BLKmode && ! direct_store[(int) mode]
5557 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5558 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5559 || GET_CODE (target) == REG
5560 || GET_CODE (target) == SUBREG
5561 /* If the field isn't aligned enough to store as an ordinary memref,
5562 store it as a bit field. */
5563 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
5564 && (MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode)
5565 || bitpos % GET_MODE_ALIGNMENT (mode)))
5566 /* If the RHS and field are a constant size and the size of the
5567 RHS isn't the same size as the bitfield, we must use bitfield operations. */
5570 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5571 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5573 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5575 /* If BITSIZE is narrower than the size of the type of EXP
5576 we will be narrowing TEMP. Normally, what's wanted are the
5577 low-order bits. However, if EXP's type is a record and this is a
5578 big-endian machine, we want the upper BITSIZE bits. */
5579 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5580 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5581 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5582 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5583 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5587 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE. */
5589 if (mode != VOIDmode && mode != BLKmode
5590 && mode != TYPE_MODE (TREE_TYPE (exp)))
5591 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5593 /* If the modes of TARGET and TEMP are both BLKmode, both
5594 must be in memory and BITPOS must be aligned on a byte
5595 boundary. If so, we simply do a block copy. */
5596 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5598 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5599 || bitpos % BITS_PER_UNIT != 0)
5602 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5603 emit_block_move (target, temp,
5604 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5608 return value_mode == VOIDmode ? const0_rtx : target;
5611 /* Store the value in the bitfield. */
5612 store_bit_field (target, bitsize, bitpos, mode, temp,
5613 int_size_in_bytes (type));
5615 if (value_mode != VOIDmode)
5617 /* The caller wants an rtx for the value.
5618 If possible, avoid refetching from the bitfield itself. */
5620 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5623 enum machine_mode tmode;
5625 tmode = GET_MODE (temp);
5626 if (tmode == VOIDmode)
5630 return expand_and (tmode, temp,
5631 gen_int_mode (width_mask, tmode),
5634 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5635 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5636 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5639 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5640 NULL_RTX, value_mode, VOIDmode,
5641 int_size_in_bytes (type));
5647 rtx addr = XEXP (target, 0);
5648 rtx to_rtx = target;
5650 /* If a value is wanted, it must be the lhs;
5651 so make the address stable for multiple use. */
5653 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5654 && ! CONSTANT_ADDRESS_P (addr)
5655 /* A frame-pointer reference is already stable. */
5656 && ! (GET_CODE (addr) == PLUS
5657 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5658 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5659 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5660 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5662 /* Now build a reference to just the desired component. */
5664 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5666 if (to_rtx == target)
5667 to_rtx = copy_rtx (to_rtx);
5669 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5670 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5671 set_mem_alias_set (to_rtx, alias_set);
5673 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5677 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5678 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5679 codes and find the ultimate containing object, which we return.
5681 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5682 bit position, and *PUNSIGNEDP to the signedness of the field.
5683 If the position of the field is variable, we store a tree
5684 giving the variable offset (in units) in *POFFSET.
5685 This offset is in addition to the bit position.
5686 If the position is not variable, we store 0 in *POFFSET.
5688 If any of the extraction expressions is volatile,
5689 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5691 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5692 is a mode that can be used to access the field. In that case, *PBITSIZE is redundant with the size of MODE.
5695 If the field describes a variable-sized object, *PMODE is set to
5696 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5697 this case, but the address of the object can be found. */
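/* For example (illustrative): for the reference R.A[2] in

       struct R { char pad; int a[4]; } r;

   the containing object returned is R, with *PBITSIZE the width of
   int, *PBITPOS the constant bit offset of A[2] within R, *POFFSET 0
   and *PMODE the mode of int.  */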
5700 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5701 punsignedp, pvolatilep)
5703 HOST_WIDE_INT *pbitsize;
5704 HOST_WIDE_INT *pbitpos;
5706 enum machine_mode *pmode;
5711 enum machine_mode mode = VOIDmode;
5712 tree offset = size_zero_node;
5713 tree bit_offset = bitsize_zero_node;
5714 tree placeholder_ptr = 0;
5717 /* First get the mode, signedness, and size. We do this from just the
5718 outermost expression. */
5719 if (TREE_CODE (exp) == COMPONENT_REF)
5721 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5722 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5723 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5725 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5727 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5729 size_tree = TREE_OPERAND (exp, 1);
5730 *punsignedp = TREE_UNSIGNED (exp);
5734 mode = TYPE_MODE (TREE_TYPE (exp));
5735 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5737 if (mode == BLKmode)
5738 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5740 *pbitsize = GET_MODE_BITSIZE (mode);
5745 if (! host_integerp (size_tree, 1))
5746 mode = BLKmode, *pbitsize = -1;
5748 *pbitsize = tree_low_cst (size_tree, 1);
5751 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5752 and find the ultimate containing object. */
5755 if (TREE_CODE (exp) == BIT_FIELD_REF)
5756 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5757 else if (TREE_CODE (exp) == COMPONENT_REF)
5759 tree field = TREE_OPERAND (exp, 1);
5760 tree this_offset = DECL_FIELD_OFFSET (field);
5762 /* If this field hasn't been filled in yet, don't go
5763 past it. This should only happen when folding expressions
5764 made during type construction. */
5765 if (this_offset == 0)
5767 else if (! TREE_CONSTANT (this_offset)
5768 && contains_placeholder_p (this_offset))
5769 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5771 offset = size_binop (PLUS_EXPR, offset, this_offset);
5772 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5773 DECL_FIELD_BIT_OFFSET (field));
5775 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5778 else if (TREE_CODE (exp) == ARRAY_REF
5779 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5781 tree index = TREE_OPERAND (exp, 1);
5782 tree array = TREE_OPERAND (exp, 0);
5783 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5784 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5785 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5787 /* We assume all arrays have sizes that are a multiple of a byte.
5788 First subtract the lower bound, if any, in the type of the
5789 index, then convert to sizetype and multiply by the size of the array element. */
5791 if (low_bound != 0 && ! integer_zerop (low_bound))
5792 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5795 /* If the index has a self-referential type, pass it to a
5796 WITH_RECORD_EXPR; if the component size is self-referential, pass our
5797 component to one. */
5798 if (! TREE_CONSTANT (index)
5799 && contains_placeholder_p (index))
5800 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5801 if (! TREE_CONSTANT (unit_size)
5802 && contains_placeholder_p (unit_size))
5803 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5805 offset = size_binop (PLUS_EXPR, offset,
5806 size_binop (MULT_EXPR,
5807 convert (sizetype, index),
5811 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5813 tree new = find_placeholder (exp, &placeholder_ptr);
5815 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5816 We might have been called from tree optimization where we
5817 haven't set up an object yet. */
5825 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5826 && TREE_CODE (exp) != VIEW_CONVERT_EXPR
5827 && ! ((TREE_CODE (exp) == NOP_EXPR
5828 || TREE_CODE (exp) == CONVERT_EXPR)
5829 && (TYPE_MODE (TREE_TYPE (exp))
5830 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5833 /* If any reference in the chain is volatile, the effect is volatile. */
5834 if (TREE_THIS_VOLATILE (exp))
5837 exp = TREE_OPERAND (exp, 0);
5840 /* If OFFSET is constant, see if we can return the whole thing as a
5841 constant bit position. Otherwise, split it up. */
5842 if (host_integerp (offset, 0)
5843 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5845 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5846 && host_integerp (tem, 0))
5847 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5849 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5855 /* Return 1 if T is an expression that get_inner_reference handles. */
5858 handled_component_p (t)
5861 switch (TREE_CODE (t))
5866 case ARRAY_RANGE_REF:
5867 case NON_LVALUE_EXPR:
5868 case VIEW_CONVERT_EXPR:
5873 return (TYPE_MODE (TREE_TYPE (t))
5874 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5881 /* Given an rtx VALUE that may contain additions and multiplications, return
5882 an equivalent value that just refers to a register, memory, or constant.
5883 This is done by generating instructions to perform the arithmetic and
5884 returning a pseudo-register containing the value.
5886 The returned value may be a REG, SUBREG, MEM or constant. */
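/* For example (illustrative): given VALUE of the form

       (plus:SI (mult:SI (reg:SI 100) (const_int 4)) (reg:SI 101))

   this emits a multiply and an add and returns a pseudo register
   holding the final sum.  */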
5889 force_operand (value, target)
5893 /* Use subtarget as the target for operand 0 of a binary operation. */
5894 rtx subtarget = get_subtarget (target);
5895 enum rtx_code code = GET_CODE (value);
5897 /* Check for a PIC address load. */
5898 if ((code == PLUS || code == MINUS)
5899 && XEXP (value, 0) == pic_offset_table_rtx
5900 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5901 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5902 || GET_CODE (XEXP (value, 1)) == CONST))
5905 subtarget = gen_reg_rtx (GET_MODE (value));
5906 emit_move_insn (subtarget, value);
5910 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5913 target = gen_reg_rtx (GET_MODE (value));
5914 convert_move (target, force_operand (XEXP (value, 0), NULL),
5915 code == ZERO_EXTEND);
5919 if (GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
5921 op2 = XEXP (value, 1);
5922 if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
5924 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5927 op2 = negate_rtx (GET_MODE (value), op2);
5930 /* Check for an addition with OP2 a constant integer and our first
5931 operand a PLUS of a virtual register and something else. In that
5932 case, we want to emit the sum of the virtual register and the
5933 constant first and then add the other value. This allows virtual
5934 register instantiation to simply modify the constant rather than
5935 creating another one around this addition. */
5936 if (code == PLUS && GET_CODE (op2) == CONST_INT
5937 && GET_CODE (XEXP (value, 0)) == PLUS
5938 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5939 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5940 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5942 rtx temp = expand_simple_binop (GET_MODE (value), code,
5943 XEXP (XEXP (value, 0), 0), op2,
5944 subtarget, 0, OPTAB_LIB_WIDEN);
5945 return expand_simple_binop (GET_MODE (value), code, temp,
5946 force_operand (XEXP (XEXP (value,
5948 target, 0, OPTAB_LIB_WIDEN);
5951 op1 = force_operand (XEXP (value, 0), subtarget);
5952 op2 = force_operand (op2, NULL_RTX);
5956 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5958 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5959 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5960 target, 1, OPTAB_LIB_WIDEN);
5962 return expand_divmod (0,
5963 FLOAT_MODE_P (GET_MODE (value))
5964 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5965 GET_MODE (value), op1, op2, target, 0);
5968 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5972 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5976 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5980 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5981 target, 0, OPTAB_LIB_WIDEN);
5984 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5985 target, 1, OPTAB_LIB_WIDEN);
5988 if (GET_RTX_CLASS (code) == '1')
5990 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5991 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5994 #ifdef INSN_SCHEDULING
5995 /* On machines that have insn scheduling, we want all memory references to be
5996 explicit, so we need to deal with such paradoxical SUBREGs. */
5997 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
5998 && (GET_MODE_SIZE (GET_MODE (value))
5999 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
6001 = simplify_gen_subreg (GET_MODE (value),
6002 force_reg (GET_MODE (SUBREG_REG (value)),
6003 force_operand (SUBREG_REG (value),
6005 GET_MODE (SUBREG_REG (value)),
6006 SUBREG_BYTE (value));
6012 /* Subroutine of expand_expr: return nonzero iff there is no way that
6013 EXP can reference X, which is being modified. TOP_P is nonzero if this
6014 call is going to be used to determine whether we need a temporary
6015 for EXP, as opposed to a recursive call to this function.
6017 It is always safe for this routine to return zero since it merely
6018 searches for optimization opportunities. */
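/* For example (illustrative): when expanding

       *p = *p + g ();

   the call to G might read *P, so safe_from_p returns 0 for the MEM on
   the left-hand side and the right-hand side is computed into a
   temporary first.  */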
6021 safe_from_p (x, exp, top_p)
6028 static tree save_expr_list;
6031 /* If EXP has varying size, we MUST use a target since we currently
6032 have no way of allocating temporaries of variable size
6033 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6034 So we assume here that something at a higher level has prevented a
6035 clash. This is somewhat bogus, but the best we can do. Only
6036 do this when X is BLKmode and when we are at the top level. */
6037 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6038 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
6039 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
6040 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
6041 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
6043 && GET_MODE (x) == BLKmode)
6044 /* If X is in the outgoing argument area, it is always safe. */
6045 || (GET_CODE (x) == MEM
6046 && (XEXP (x, 0) == virtual_outgoing_args_rtx
6047 || (GET_CODE (XEXP (x, 0)) == PLUS
6048 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
6051 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
6052 find the underlying pseudo. */
6053 if (GET_CODE (x) == SUBREG)
6056 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6060 /* A SAVE_EXPR might appear many times in the expression passed to the
6061 top-level safe_from_p call, and if it has a complex subexpression,
6062 examining it multiple times could result in a combinatorial explosion.
6063 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
6064 with optimization took about 28 minutes to compile -- even though it was
6065 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
6066 and turn that off when we are done. We keep a list of the SAVE_EXPRs
6067 we have processed. Note that the only test of top_p was above. */
6076 rtn = safe_from_p (x, exp, 0);
6078 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
6079 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
6084 /* Now look at our tree code and possibly recurse. */
6085 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6088 exp_rtl = DECL_RTL_IF_SET (exp);
6095 if (TREE_CODE (exp) == TREE_LIST)
6096 return ((TREE_VALUE (exp) == 0
6097 || safe_from_p (x, TREE_VALUE (exp), 0))
6098 && (TREE_CHAIN (exp) == 0
6099 || safe_from_p (x, TREE_CHAIN (exp), 0)));
6100 else if (TREE_CODE (exp) == ERROR_MARK)
6101 return 1; /* An already-visited SAVE_EXPR? */
6106 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6110 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
6111 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
6115 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6116 the expression. If it is set, we conflict iff we are that rtx or
6117 both are in memory. Otherwise, we check all operands of the
6118 expression recursively. */
6120 switch (TREE_CODE (exp))
6123 /* If the operand is static or we are static, we can't conflict.
6124 Likewise if we don't conflict with the operand at all. */
6125 if (staticp (TREE_OPERAND (exp, 0))
6126 || TREE_STATIC (exp)
6127 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6130 /* Otherwise, the only way this can conflict is if we are taking
6131 the address of a DECL and that address is part of X, which is very rare. */
6133 exp = TREE_OPERAND (exp, 0);
6136 if (!DECL_RTL_SET_P (exp)
6137 || GET_CODE (DECL_RTL (exp)) != MEM)
6140 exp_rtl = XEXP (DECL_RTL (exp), 0);
6145 if (GET_CODE (x) == MEM
6146 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6147 get_alias_set (exp)))
6152 /* Assume that the call will clobber all hard registers and all of memory. */
6154 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6155 || GET_CODE (x) == MEM)
6160 /* If a sequence exists, we would have to scan every instruction
6161 in the sequence to see if it was safe. This is probably not worthwhile. */
6163 if (RTL_EXPR_SEQUENCE (exp))
6166 exp_rtl = RTL_EXPR_RTL (exp);
6169 case WITH_CLEANUP_EXPR:
6170 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
6173 case CLEANUP_POINT_EXPR:
6174 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6177 exp_rtl = SAVE_EXPR_RTL (exp);
6181 /* If we've already scanned this, don't do it again. Otherwise,
6182 show we've scanned it and record for clearing the flag if we're going on. */
6184 if (TREE_PRIVATE (exp))
6187 TREE_PRIVATE (exp) = 1;
6188 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6190 TREE_PRIVATE (exp) = 0;
6194 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
6198 /* The only operand we look at is operand 1. The rest aren't
6199 part of the expression. */
6200 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
6202 case METHOD_CALL_EXPR:
6203 /* This takes an rtx argument, but shouldn't appear here. */
6210 /* If we have an rtx, we do not need to scan our operands. */
6214 nops = first_rtl_op (TREE_CODE (exp));
6215 for (i = 0; i < nops; i++)
6216 if (TREE_OPERAND (exp, i) != 0
6217 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6220 /* If this is a language-specific tree code, it may require
6221 special handling. */
6222 if ((unsigned int) TREE_CODE (exp)
6223 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
6224 && !(*lang_hooks.safe_from_p) (x, exp))
6228 /* If we have an rtl, find any enclosed object. Then see if we conflict with it. */
6232 if (GET_CODE (exp_rtl) == SUBREG)
6234 exp_rtl = SUBREG_REG (exp_rtl);
6235 if (GET_CODE (exp_rtl) == REG
6236 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6240 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6241 are memory and they conflict. */
6242 return ! (rtx_equal_p (x, exp_rtl)
6243 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
6244 && true_dependence (exp_rtl, VOIDmode, x,
6245 rtx_addr_varies_p)));
6248 /* If we reach here, it is safe. */
6252 /* Subroutine of expand_expr: return rtx if EXP is a
6253 variable or parameter; else return 0. */
6260 switch (TREE_CODE (exp))
6264 return DECL_RTL (exp);
6270 #ifdef MAX_INTEGER_COMPUTATION_MODE
6273 check_max_integer_computation_mode (exp)
6276 enum tree_code code;
6277 enum machine_mode mode;
6279 /* Strip any NOPs that don't change the mode. */
6281 code = TREE_CODE (exp);
6283 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
6284 if (code == NOP_EXPR
6285 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
6288 /* First check the type of the overall operation. We need only look at
6289 unary, binary and relational operations. */
6290 if (TREE_CODE_CLASS (code) == '1'
6291 || TREE_CODE_CLASS (code) == '2'
6292 || TREE_CODE_CLASS (code) == '<')
6294 mode = TYPE_MODE (TREE_TYPE (exp));
6295 if (GET_MODE_CLASS (mode) == MODE_INT
6296 && mode > MAX_INTEGER_COMPUTATION_MODE)
6297 internal_error ("unsupported wide integer operation");
6300 /* Check operand of a unary op. */
6301 if (TREE_CODE_CLASS (code) == '1')
6303 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6304 if (GET_MODE_CLASS (mode) == MODE_INT
6305 && mode > MAX_INTEGER_COMPUTATION_MODE)
6306 internal_error ("unsupported wide integer operation");
6309 /* Check operands of a binary/comparison op. */
6310 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
6312 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6313 if (GET_MODE_CLASS (mode) == MODE_INT
6314 && mode > MAX_INTEGER_COMPUTATION_MODE)
6315 internal_error ("unsupported wide integer operation");
6317 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
6318 if (GET_MODE_CLASS (mode) == MODE_INT
6319 && mode > MAX_INTEGER_COMPUTATION_MODE)
6320 internal_error ("unsupported wide integer operation");
6325 /* Return the highest power of two that EXP is known to be a multiple of.
6326 This is used in updating alignment of MEMs in array references. */
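/* For example (illustrative): for an index expression I * 12, an
   unknown I contributes a factor of 1 and the constant contributes 4,
   so the result is 4.  */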
6328 static HOST_WIDE_INT
6329 highest_pow2_factor (exp)
6332 HOST_WIDE_INT c0, c1;
6334 switch (TREE_CODE (exp))
6337 /* We can find the lowest bit that's a one. If the low
6338 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6339 We need to handle this case since we can find it in a COND_EXPR,
6340 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6341 erroneous program, so return BIGGEST_ALIGNMENT to avoid any later ICE. */
6343 if (TREE_CONSTANT_OVERFLOW (exp))
6344 return BIGGEST_ALIGNMENT;
6347 /* Note: tree_low_cst is intentionally not used here,
6348 we don't care about the upper bits. */
6349 c0 = TREE_INT_CST_LOW (exp);
6351 return c0 ? c0 : BIGGEST_ALIGNMENT;
6355 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6356 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6357 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6358 return MIN (c0, c1);
6361 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6362 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
    case ROUND_DIV_EXPR:  case TRUNC_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
6367 if (integer_pow2p (TREE_OPERAND (exp, 1))
6368 && host_integerp (TREE_OPERAND (exp, 1), 1))
	{
	  c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6371 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
	  return MAX (1, c0 / c1);
	}
      break;
6376 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6377 case SAVE_EXPR: case WITH_RECORD_EXPR:
6378 return highest_pow2_factor (TREE_OPERAND (exp, 0));
    case COMPOUND_EXPR:
      return highest_pow2_factor (TREE_OPERAND (exp, 1));
    case COND_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6385 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
      return MIN (c0, c1);

    default:
      break;
    }

  return 1;
}
6395 /* Similar, except that it is known that the expression must be a multiple
6396 of the alignment of TYPE. */
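/* A short worked example (values assumed for illustration): if
   highest_pow2_factor (EXP) yields 2 but TYPE has 8-byte alignment,
   the function below returns MAX (2, 8) = 8, since EXP is also known
   to be a multiple of TYPE's alignment.  */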
6398 static HOST_WIDE_INT
highest_pow2_factor_for_type (type, exp)
     tree type;
     tree exp;
{
6403 HOST_WIDE_INT type_align, factor;
6405 factor = highest_pow2_factor (exp);
6406 type_align = TYPE_ALIGN (type) / BITS_PER_UNIT;
  return MAX (factor, type_align);
}
6410 /* Return an object on the placeholder list that matches EXP, a
6411 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
6412 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
6413 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
6414 is a location which initially points to a starting location in the
6415 placeholder list (zero means start of the list) and where a pointer into
6416 the placeholder list at which the object is found is placed. */
tree
find_placeholder (exp, plist)
     tree exp;
     tree *plist;
{
6423 tree type = TREE_TYPE (exp);
6424 tree placeholder_expr;
6426 for (placeholder_expr
6427 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
6428 placeholder_expr != 0;
6429 placeholder_expr = TREE_CHAIN (placeholder_expr))
    {
      tree need_type = TYPE_MAIN_VARIANT (type);
      tree elt;
      /* Find the outermost reference that is of the type we want.  If none,
	 see if any object has a type that is a pointer to the type we
	 want.  */
6437 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6438 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6439 || TREE_CODE (elt) == COND_EXPR)
6440 ? TREE_OPERAND (elt, 1)
6441 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6442 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6443 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6444 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6445 ? TREE_OPERAND (elt, 0) : 0))
	if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
	  {
	    if (plist)
	      *plist = placeholder_expr;
	    return elt;
	  }
6453 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
	   elt
	   = ((TREE_CODE (elt) == COMPOUND_EXPR
6456 || TREE_CODE (elt) == COND_EXPR)
6457 ? TREE_OPERAND (elt, 1)
6458 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6459 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6460 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6461 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6462 ? TREE_OPERAND (elt, 0) : 0))
6463 if (POINTER_TYPE_P (TREE_TYPE (elt))
	    && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
		== need_type))
	  {
	    if (plist)
	      *plist = placeholder_expr;
	    return build1 (INDIRECT_REF, need_type, elt);
	  }
    }

  return 0;
}
6476 /* expand_expr: generate code for computing expression EXP.
6477 An rtx for the computed value is returned. The value is never null.
6478 In the case of a void EXP, const0_rtx is returned.
6480 The value may be stored in TARGET if TARGET is nonzero.
6481 TARGET is just a suggestion; callers must assume that
6482 the rtx returned may not be the same as TARGET.
6484 If TARGET is CONST0_RTX, it means that the value will be ignored.
6486 If TMODE is not VOIDmode, it suggests generating the
6487 result in mode TMODE. But this is done only when convenient.
   Otherwise, TMODE is ignored and the value is generated in its natural mode.
6489 TMODE is just a suggestion; callers must assume that
6490 the rtx returned may not have mode TMODE.
6492 Note that TARGET may have neither TMODE nor MODE. In that case, it
6493 probably will not be used.
6495 If MODIFIER is EXPAND_SUM then when EXP is an addition
6496 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6497 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6498 products as above, or REG or MEM, or constant.
6499 Ordinarily in such cases we would output mul or add instructions
6500 and then return a pseudo reg containing the sum.
6502 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6503 it also marks a label as absolutely required (it can't be dead).
6504 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6505 This is used for outputting expressions used in initializers.
6507 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6508 with a constant address even if that address is not normally legitimate.
6509 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6511 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6512 a call parameter. Such targets require special care as we haven't yet
6513 marked TARGET so that it's safe from being trashed by libcalls. We
   don't want to use TARGET for anything but the final result;
   intermediate values must go elsewhere.  Additionally, calls to
6516 emit_block_move will be flagged with BLOCK_OP_CALL_PARM. */
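/* For illustration (an assumed 32-bit target, not from the original
   source): expanding &a[i] with 4-byte elements under EXPAND_SUM may
   yield (plus (mult (reg) (const_int 4)) (symbol_ref a)) rather than
   forcing the sum into a pseudo, letting the caller fold the whole
   address into a single memory operand.  */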
rtx
expand_expr (exp, target, tmode, modifier)
     tree exp;
     rtx target;
6522 enum machine_mode tmode;
6523 enum expand_modifier modifier;
{
  rtx op0, op1, temp;
  tree type = TREE_TYPE (exp);
6527 int unsignedp = TREE_UNSIGNED (type);
6528 enum machine_mode mode;
6529 enum tree_code code = TREE_CODE (exp);
  optab this_optab;
  rtx subtarget, original_target;
  int ignore;
  tree context;
6535 /* Handle ERROR_MARK before anybody tries to access its type. */
6536 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
    {
      op0 = CONST0_RTX (tmode);
      if (op0 != 0)
	return op0;
      return const0_rtx;
    }
6544 mode = TYPE_MODE (type);
6545 /* Use subtarget as the target for operand 0 of a binary operation. */
6546 subtarget = get_subtarget (target);
6547 original_target = target;
6548 ignore = (target == const0_rtx
6549 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6550 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6551 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6552 && TREE_CODE (type) == VOID_TYPE));
6554 /* If we are going to ignore this result, we need only do something
6555 if there is a side-effect somewhere in the expression. If there
6556 is, short-circuit the most common cases here. Note that we must
6557 not call expand_expr with anything but const0_rtx in case this
6558 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
  if (ignore)
    {
      if (! TREE_SIDE_EFFECTS (exp))
	return const0_rtx;
6565 /* Ensure we reference a volatile object even if value is ignored, but
6566 don't do this if all we are doing is taking its address. */
6567 if (TREE_THIS_VOLATILE (exp)
6568 && TREE_CODE (exp) != FUNCTION_DECL
6569 && mode != VOIDmode && mode != BLKmode
6570 && modifier != EXPAND_CONST_ADDRESS)
	{
	  temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6573 if (GET_CODE (temp) == MEM)
	    temp = copy_to_reg (temp);
	  return const0_rtx;
	}
6578 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6579 || code == INDIRECT_REF || code == BUFFER_REF)
	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
			    modifier);
6583 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6584 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
	{
	  expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
	  expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
	  return const0_rtx;
	}
6590 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6591 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
	/* If the second operand has no side effects, just evaluate
	   the first.  */
	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
			    modifier);
      else if (code == BIT_FIELD_REF)
	{
	  expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
	  expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
	  expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
	  return const0_rtx;
	}

      target = 0;
    }
6607 #ifdef MAX_INTEGER_COMPUTATION_MODE
6608 /* Only check stuff here if the mode we want is different from the mode
6609 of the expression; if it's the same, check_max_integer_computation_mode
6610 will handle it. Do we really need to check this stuff at all? */
  if (target
      && GET_MODE (target) != mode
6614 && TREE_CODE (exp) != INTEGER_CST
6615 && TREE_CODE (exp) != PARM_DECL
6616 && TREE_CODE (exp) != ARRAY_REF
6617 && TREE_CODE (exp) != ARRAY_RANGE_REF
6618 && TREE_CODE (exp) != COMPONENT_REF
6619 && TREE_CODE (exp) != BIT_FIELD_REF
6620 && TREE_CODE (exp) != INDIRECT_REF
6621 && TREE_CODE (exp) != CALL_EXPR
6622 && TREE_CODE (exp) != VAR_DECL
6623 && TREE_CODE (exp) != RTL_EXPR)
    {
      enum machine_mode mode = GET_MODE (target);
6627 if (GET_MODE_CLASS (mode) == MODE_INT
6628 && mode > MAX_INTEGER_COMPUTATION_MODE)
	internal_error ("unsupported wide integer operation");
    }

  if (tmode != mode
6633 && TREE_CODE (exp) != INTEGER_CST
6634 && TREE_CODE (exp) != PARM_DECL
6635 && TREE_CODE (exp) != ARRAY_REF
6636 && TREE_CODE (exp) != ARRAY_RANGE_REF
6637 && TREE_CODE (exp) != COMPONENT_REF
6638 && TREE_CODE (exp) != BIT_FIELD_REF
6639 && TREE_CODE (exp) != INDIRECT_REF
6640 && TREE_CODE (exp) != VAR_DECL
6641 && TREE_CODE (exp) != CALL_EXPR
6642 && TREE_CODE (exp) != RTL_EXPR
6643 && GET_MODE_CLASS (tmode) == MODE_INT
6644 && tmode > MAX_INTEGER_COMPUTATION_MODE)
    internal_error ("unsupported wide integer operation");

  check_max_integer_computation_mode (exp);
#endif
6650 /* If will do cse, generate all results into pseudo registers
6651 since 1) that allows cse to find more things
6652 and 2) otherwise cse could produce an insn the machine
6653 cannot support. An exception is a CONSTRUCTOR into a multi-word
6654 MEM: that's much more likely to be most efficient into the MEM.
6655 Another is a CALL_EXPR which must return in memory. */
6657 if (! cse_not_expected && mode != BLKmode && target
6658 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6659 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
      && ! (code == CALL_EXPR && aggregate_value_p (exp)))
    target = 0;

  switch (code)
    {
    case LABEL_DECL:
      {
6667 tree function = decl_function_context (exp);
6668 /* Handle using a label in a containing function. */
6669 if (function != current_function_decl
6670 && function != inline_function_decl && function != 0)
	  {
	    struct function *p = find_function_data (function);
6673 p->expr->x_forced_labels
6674 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
				   p->expr->x_forced_labels);
	  }
	else
	  {
	    if (modifier == EXPAND_INITIALIZER)
	      forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
						 label_rtx (exp),
						 forced_labels);
	  }
6685 temp = gen_rtx_MEM (FUNCTION_MODE,
6686 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6687 if (function != current_function_decl
6688 && function != inline_function_decl && function != 0)
	  LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
	return temp;
      }
    case PARM_DECL:
      if (!DECL_RTL_SET_P (exp))
	{
	  error_with_decl (exp, "prior parameter's size depends on `%s'");
	  return CONST0_RTX (mode);
	}
6700 /* ... fall through ... */
    case VAR_DECL:
      /* If a static var's type was incomplete when the decl was written,
	 but the type is complete now, lay out the decl now.  */
6705 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6706 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
	{
	  rtx value = DECL_RTL_IF_SET (exp);
6710 layout_decl (exp, 0);
	  /* If the RTL was already set, update its mode and memory
	     attributes.  */
	  if (value != 0)
	    {
	      PUT_MODE (value, DECL_MODE (exp));
6717 SET_DECL_RTL (exp, 0);
6718 set_mem_attributes (value, exp, 1);
	      SET_DECL_RTL (exp, value);
	    }
	}
6723 /* ... fall through ... */
    case FUNCTION_DECL:
    case RESULT_DECL:
      if (DECL_RTL (exp) == 0)
	abort ();
6730 /* Ensure variable marked as used even if it doesn't go through
	 a parser.  If it hasn't been used yet, write out an external
	 definition.  */
      if (! TREE_USED (exp))
	{
	  assemble_external (exp);
	  TREE_USED (exp) = 1;
	}
      /* Show we haven't gotten RTL for this yet.  */
      temp = 0;
6742 /* Handle variables inherited from containing functions. */
6743 context = decl_function_context (exp);
6745 /* We treat inline_function_decl as an alias for the current function
6746 because that is the inline function whose vars, types, etc.
6747 are being merged into the current function.
6748 See expand_inline_function. */
6750 if (context != 0 && context != current_function_decl
6751 && context != inline_function_decl
6752 /* If var is static, we don't need a static chain to access it. */
6753 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6754 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
	{
	  rtx addr;

	  /* Mark as non-local and addressable.  */
6759 DECL_NONLOCAL (exp) = 1;
	  if (DECL_NO_STATIC_CHAIN (current_function_decl))
	    abort ();
6762 (*lang_hooks.mark_addressable) (exp);
	  if (GET_CODE (DECL_RTL (exp)) != MEM)
	    abort ();
6765 addr = XEXP (DECL_RTL (exp), 0);
	  if (GET_CODE (addr) == MEM)
	    addr
	      = replace_equiv_address (addr,
				       fix_lexical_addr (XEXP (addr, 0), exp));
	  else
	    addr = fix_lexical_addr (addr, exp);
	  temp = replace_equiv_address (DECL_RTL (exp), addr);
	}
6776 /* This is the case of an array whose size is to be determined
	 from its initializer, while the initializer is still being parsed.
	 See expand_decl.  */
6780 else if (GET_CODE (DECL_RTL (exp)) == MEM
6781 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6782 temp = validize_mem (DECL_RTL (exp));
6784 /* If DECL_RTL is memory, we are in the normal case and either
6785 the address is not valid or it is not a register and -fforce-addr
6786 is specified, get the address into a register. */
6788 else if (GET_CODE (DECL_RTL (exp)) == MEM
6789 && modifier != EXPAND_CONST_ADDRESS
6790 && modifier != EXPAND_SUM
6791 && modifier != EXPAND_INITIALIZER
6792 && (! memory_address_p (DECL_MODE (exp),
6793 XEXP (DECL_RTL (exp), 0))
		   || (flag_force_addr
		       && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6796 temp = replace_equiv_address (DECL_RTL (exp),
6797 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6799 /* If we got something, return it. But first, set the alignment
6800 if the address is a register. */
      if (temp != 0)
	{
	  if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
	    mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));

	  return temp;
	}
6809 /* If the mode of DECL_RTL does not match that of the decl, it
6810 must be a promoted value. We return a SUBREG of the wanted mode,
6811 but mark it so that we know that it was already extended. */
6813 if (GET_CODE (DECL_RTL (exp)) == REG
6814 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
	{
	  /* Get the signedness used for this variable.  Ensure we get the
6817 same mode we got when the variable was declared. */
6818 if (GET_MODE (DECL_RTL (exp))
6819 != promote_mode (type, DECL_MODE (exp), &unsignedp,
6820 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
	    abort ();

	  temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6824 SUBREG_PROMOTED_VAR_P (temp) = 1;
	  SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
	  return temp;
	}
6829 return DECL_RTL (exp);
    case INTEGER_CST:
      temp = immed_double_const (TREE_INT_CST_LOW (exp),
6833 TREE_INT_CST_HIGH (exp), mode);
6835 /* ??? If overflow is set, fold will have done an incomplete job,
6836 which can result in (plus xx (const_int 0)), which can get
6837 simplified by validate_replace_rtx during virtual register
6838 instantiation, which can result in unrecognizable insns.
6839 Avoid this by forcing all overflows into registers. */
6840 if (TREE_CONSTANT_OVERFLOW (exp)
6841 && modifier != EXPAND_INITIALIZER)
	temp = force_reg (mode, temp);

      return temp;

    case VECTOR_CST:
6847 return const_vector_from_tree (exp);
    case CONST_DECL:
      return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);

    case REAL_CST:
6853 /* If optimized, generate immediate CONST_DOUBLE
6854 which will be turned into memory by reload if necessary.
6856 We used to force a register so that loop.c could see it. But
6857 this does not allow gen_* patterns to perform optimizations with
6858 the constants. It also produces two insns in cases like "x = 1.0;".
6859 On most machines, floating-point constants are not permitted in
6860 many insns, so we'd end up copying it to a register in any case.
6862 Now, we do the copying in expand_binop, if appropriate. */
6863 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6864 TYPE_MODE (TREE_TYPE (exp)));
    case COMPLEX_CST:
    case STRING_CST:
      if (! TREE_CST_RTL (exp))
6869 output_constant_def (exp, 1);
6871 /* TREE_CST_RTL probably contains a constant address.
6872 On RISC machines where a constant address isn't valid,
6873 make some insns to get that address into a register. */
6874 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6875 && modifier != EXPAND_CONST_ADDRESS
6876 && modifier != EXPAND_INITIALIZER
6877 && modifier != EXPAND_SUM
6878 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
	      || (flag_force_addr
		  && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6881 return replace_equiv_address (TREE_CST_RTL (exp),
6882 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6883 return TREE_CST_RTL (exp);
6885 case EXPR_WITH_FILE_LOCATION:
      {
	rtx to_return;
	const char *saved_input_filename = input_filename;
6889 int saved_lineno = lineno;
6890 input_filename = EXPR_WFL_FILENAME (exp);
6891 lineno = EXPR_WFL_LINENO (exp);
6892 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6893 emit_line_note (input_filename, lineno);
6894 /* Possibly avoid switching back and forth here. */
6895 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6896 input_filename = saved_input_filename;
	lineno = saved_lineno;
	return to_return;
      }

    case SAVE_EXPR:
      context = decl_function_context (exp);
6904 /* If this SAVE_EXPR was at global context, assume we are an
6905 initialization function and move it into our context. */
      if (context == 0)
	SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6909 /* We treat inline_function_decl as an alias for the current function
6910 because that is the inline function whose vars, types, etc.
6911 are being merged into the current function.
6912 See expand_inline_function. */
      if (context == current_function_decl || context == inline_function_decl)
	context = 0;

      /* If this is non-local, handle it.  */
      if (context)
	{
6919 /* The following call just exists to abort if the context is
6920 not of a containing function. */
6921 find_function_data (context);
6923 temp = SAVE_EXPR_RTL (exp);
6924 if (temp && GET_CODE (temp) == REG)
	    {
	      put_var_into_stack (exp, /*rescan=*/true);
	      temp = SAVE_EXPR_RTL (exp);
	    }
	  if (temp == 0 || GET_CODE (temp) != MEM)
	    abort ();
	  return
	    replace_equiv_address (temp,
				   fix_lexical_addr (XEXP (temp, 0), exp));
	}
      if (SAVE_EXPR_RTL (exp) == 0)
	{
	  if (mode == VOIDmode)
	    temp = const0_rtx;
	  else
	    temp = assign_temp (build_qualified_type (type,
						      (TYPE_QUALS (type)
						       | TYPE_QUAL_CONST)),
				3, 0, 0);
6945 SAVE_EXPR_RTL (exp) = temp;
6946 if (!optimize && GET_CODE (temp) == REG)
	    save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
						save_expr_regs);
6950 /* If the mode of TEMP does not match that of the expression, it
6951 must be a promoted value. We pass store_expr a SUBREG of the
	     wanted mode but mark it so that we know that it was already
	     extended.  */

	  if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
	    {
	      temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6958 promote_mode (type, mode, &unsignedp, 0);
6959 SUBREG_PROMOTED_VAR_P (temp) = 1;
	      SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
	    }
6963 if (temp == const0_rtx)
6964 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
	  else
	    store_expr (TREE_OPERAND (exp, 0), temp,
6967 modifier == EXPAND_STACK_PARM ? 2 : 0);
	  TREE_USED (exp) = 1;
	}
6972 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6973 must be a promoted value. We return a SUBREG of the wanted mode,
6974 but mark it so that we know that it was already extended. */
6976 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6977 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
	{
	  /* Compute the signedness and make the proper SUBREG.  */
6980 promote_mode (type, mode, &unsignedp, 0);
6981 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6982 SUBREG_PROMOTED_VAR_P (temp) = 1;
	  SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
	  return temp;
	}

      return SAVE_EXPR_RTL (exp);

    case UNSAVE_EXPR:
      {
	temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6993 TREE_OPERAND (exp, 0)
	  = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
	return temp;
      }
6998 case PLACEHOLDER_EXPR:
      {
	tree old_list = placeholder_list;
7001 tree placeholder_expr = 0;
	exp = find_placeholder (exp, &placeholder_expr);
	if (exp == 0)
	  abort ();
7007 placeholder_list = TREE_CHAIN (placeholder_expr);
7008 temp = expand_expr (exp, original_target, tmode, modifier);
	placeholder_list = old_list;
	return temp;
      }
7013 case WITH_RECORD_EXPR:
7014 /* Put the object on the placeholder list, expand our first operand,
7015 and pop the list. */
      placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
				    placeholder_list);
      target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
			    modifier);
      placeholder_list = TREE_CHAIN (placeholder_list);
      return target;

    case GOTO_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
	expand_goto (TREE_OPERAND (exp, 0));
      else
	expand_computed_goto (TREE_OPERAND (exp, 0));
      return const0_rtx;

    case EXIT_EXPR:
7031 expand_exit_loop_if_false (NULL,
				 invert_truthvalue (TREE_OPERAND (exp, 0)));
      return const0_rtx;
7035 case LABELED_BLOCK_EXPR:
7036 if (LABELED_BLOCK_BODY (exp))
7037 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
7038 /* Should perhaps use expand_label, but this is simpler and safer. */
7039 do_pending_stack_adjust ();
      emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
      return const0_rtx;
7043 case EXIT_BLOCK_EXPR:
7044 if (EXIT_BLOCK_RETURN (exp))
7045 sorry ("returned value in block_exit_expr");
      expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
      return const0_rtx;

    case LOOP_EXPR:
      push_temp_slots ();
      expand_start_loop (1);
      expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
      expand_end_loop ();
      pop_temp_slots ();

      return const0_rtx;
    case BIND_EXPR:
      {
	tree vars = TREE_OPERAND (exp, 0);
7062 /* Need to open a binding contour here because
7063 if there are any cleanups they must be contained here. */
7064 expand_start_bindings (2);
7066 /* Mark the corresponding BLOCK for output in its proper place. */
7067 if (TREE_OPERAND (exp, 2) != 0
7068 && ! TREE_USED (TREE_OPERAND (exp, 2)))
7069 (*lang_hooks.decls.insert_block) (TREE_OPERAND (exp, 2));
7071 /* If VARS have not yet been expanded, expand them now. */
	while (vars)
	  {
	    if (!DECL_RTL_SET_P (vars))
	      expand_decl (vars);
	    expand_decl_init (vars);
	    vars = TREE_CHAIN (vars);
	  }
7080 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
	expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);

	return temp;
      }

    case RTL_EXPR:
7088 if (RTL_EXPR_SEQUENCE (exp))
	{
	  if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
	    abort ();
7092 emit_insn (RTL_EXPR_SEQUENCE (exp));
	  RTL_EXPR_SEQUENCE (exp) = const0_rtx;
	}
7095 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
7096 free_temps_for_rtl_expr (exp);
7097 return RTL_EXPR_RTL (exp);
    case CONSTRUCTOR:
      /* If we don't need the result, just ensure we evaluate any
	 subexpressions.  */
      if (ignore)
	{
	  tree elt;

	  for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
	    expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);

	  return const0_rtx;
	}
7112 /* All elts simple constants => refer to a constant in memory. But
7113 if this is a non-BLKmode mode, let it store a field at a time
7114 since that should make a CONST_INT or CONST_DOUBLE when we
7115 fold. Likewise, if we have a target we can use, it is best to
7116 store directly into the target unless the type is large enough
7117 that memcpy will be used. If we are making an initializer and
7118 all operands are constant, put it in memory as well.
7120 FIXME: Avoid trying to fill vector constructors piece-meal.
7121 Output them with output_constant_def below unless we're sure
7122 they're zeros. This should go away when vector initializers
7123 are treated like VECTOR_CST instead of arrays.
7125 else if ((TREE_STATIC (exp)
7126 && ((mode == BLKmode
7127 && ! (target != 0 && safe_from_p (target, exp, 1)))
7128 || TREE_ADDRESSABLE (exp)
7129 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7130 && (! MOVE_BY_PIECES_P
		       (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
			TYPE_ALIGN (type)))
7133 && ((TREE_CODE (type) == VECTOR_TYPE
7134 && !is_zeros_p (exp))
7135 || ! mostly_zeros_p (exp)))))
7136 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
	{
	  rtx constructor = output_constant_def (exp, 1);
7140 if (modifier != EXPAND_CONST_ADDRESS
7141 && modifier != EXPAND_INITIALIZER
7142 && modifier != EXPAND_SUM)
	    constructor = validize_mem (constructor);

	  return constructor;
	}
      else
	{
7149 /* Handle calls that pass values in multiple non-contiguous
7150 locations. The Irix 6 ABI has examples of this. */
7151 if (target == 0 || ! safe_from_p (target, exp, 1)
7152 || GET_CODE (target) == PARALLEL
7153 || modifier == EXPAND_STACK_PARM)
	    target
	      = assign_temp (build_qualified_type (type,
						   (TYPE_QUALS (type)
7157 | (TREE_READONLY (exp)
7158 * TYPE_QUAL_CONST))),
7159 0, TREE_ADDRESSABLE (exp), 1);
	  store_constructor (exp, target, 0, int_expr_size (exp));
	  return target;
	}

    case INDIRECT_REF:
      {
	tree exp1 = TREE_OPERAND (exp, 0);
	tree index;
7169 tree string = string_constant (exp1, &index);
7171 /* Try to optimize reads from const strings. */
	if (string
	    && TREE_CODE (string) == STRING_CST
7174 && TREE_CODE (index) == INTEGER_CST
7175 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
7176 && GET_MODE_CLASS (mode) == MODE_INT
7177 && GET_MODE_SIZE (mode) == 1
7178 && modifier != EXPAND_WRITE)
7179 return gen_int_mode (TREE_STRING_POINTER (string)
7180 [TREE_INT_CST_LOW (index)], mode);
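	/* Illustrative case (assumed source, not from the original): for
	   a read like c = "hello"[1], the index 1 is within the string
	   length, so the expansion above returns (const_int 101), the
	   QImode value of 'e', with no memory reference emitted.  */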
7182 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
7183 op0 = memory_address (mode, op0);
7184 temp = gen_rtx_MEM (mode, op0);
7185 set_mem_attributes (temp, exp, 0);
7187 /* If we are writing to this object and its type is a record with
7188 readonly fields, we must mark it as readonly so it will
7189 conflict with readonly references to those fields. */
7190 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
	  RTX_UNCHANGING_P (temp) = 1;

	return temp;
      }

    case ARRAY_REF:
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
	abort ();

      {
	tree array = TREE_OPERAND (exp, 0);
7202 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
7203 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
7204 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
7207 /* Optimize the special-case of a zero lower bound.
7209 We convert the low_bound to sizetype to avoid some problems
7210 with constant folding. (E.g. suppose the lower bound is 1,
7211 and its mode is QI. Without the conversion, (ARRAY
7212 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
7213 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
7215 if (! integer_zerop (low_bound))
7216 index = size_diffop (index, convert (sizetype, low_bound));
7218 /* Fold an expression like: "foo"[2].
7219 This is not done in fold so it won't happen inside &.
7220 Don't fold if this is for wide characters since it's too
7221 difficult to do correctly and this is a very rare case. */
7223 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
7224 && TREE_CODE (array) == STRING_CST
7225 && TREE_CODE (index) == INTEGER_CST
7226 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
7227 && GET_MODE_CLASS (mode) == MODE_INT
7228 && GET_MODE_SIZE (mode) == 1)
7229 return gen_int_mode (TREE_STRING_POINTER (array)
7230 [TREE_INT_CST_LOW (index)], mode);
7232 /* If this is a constant index into a constant array,
7233 just get the value from the array. Handle both the cases when
7234 we have an explicit constructor and when our operand is a variable
7235 that was declared const. */
7237 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
7238 && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
7239 && TREE_CODE (index) == INTEGER_CST
7240 && 0 > compare_tree_int (index,
7241 list_length (CONSTRUCTOR_ELTS
7242 (TREE_OPERAND (exp, 0)))))
	  {
	    tree elem;
	    unsigned HOST_WIDE_INT i;

	    for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7247 i = TREE_INT_CST_LOW (index);
7248 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
	      ;

	    if (elem)
	      return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
				  modifier);
	  }
7256 else if (optimize >= 1
7257 && modifier != EXPAND_CONST_ADDRESS
7258 && modifier != EXPAND_INITIALIZER
7259 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7260 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7261 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
	  {
	    if (TREE_CODE (index) == INTEGER_CST)
	      {
		tree init = DECL_INITIAL (array);
7267 if (TREE_CODE (init) == CONSTRUCTOR)
		  {
		    tree elem;

		    for (elem = CONSTRUCTOR_ELTS (init);
			 (elem
			  && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
			 elem = TREE_CHAIN (elem))
		      ;
7277 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
		      return expand_expr (fold (TREE_VALUE (elem)), target,
					  tmode, modifier);
		  }
7281 else if (TREE_CODE (init) == STRING_CST
7282 && 0 > compare_tree_int (index,
7283 TREE_STRING_LENGTH (init)))
		  {
		    tree type = TREE_TYPE (TREE_TYPE (init));
7286 enum machine_mode mode = TYPE_MODE (type);
7288 if (GET_MODE_CLASS (mode) == MODE_INT
7289 && GET_MODE_SIZE (mode) == 1)
7290 return gen_int_mode (TREE_STRING_POINTER (init)
		      return gen_int_mode (TREE_STRING_POINTER (init)
					   [TREE_INT_CST_LOW (index)], mode);
		  }
	      }
	  }
      }
      /* Fall through.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case ARRAY_RANGE_REF:
7301 /* If the operand is a CONSTRUCTOR, we can just extract the
7302 appropriate field if it is present. Don't do this if we have
7303 already written the data since we want to refer to that copy
7304 and varasm.c assumes that's what we'll do. */
7305 if (code == COMPONENT_REF
7306 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
7307 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
	{
	  tree elt;

	  for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
7312 elt = TREE_CHAIN (elt))
7313 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
7314 /* We can normally use the value of the field in the
7315 CONSTRUCTOR. However, if this is a bitfield in
7316 an integral mode that we can fit in a HOST_WIDE_INT,
7317 we must mask only the number of bits in the bitfield,
7318 since this is done implicitly by the constructor. If
7319 the bitfield does not meet either of those conditions,
7320 we can't do this optimization. */
7321 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
7322 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
			== MODE_INT)
		    && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
7325 <= HOST_BITS_PER_WIDE_INT))))
	      {
		if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
		    && modifier == EXPAND_STACK_PARM)
		  target = 0;
7330 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
7331 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
		  {
		    HOST_WIDE_INT bitsize
7334 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
7335 enum machine_mode imode
7336 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
		    if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
		      {
			op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
			op0 = expand_and (imode, op0, op1, target);
		      }
		    else
		      {
			tree count
			  = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
					 0);

			op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
					    target, 0);
			op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
					    target, 0);
		      }
		  }

		return op0;
	      }
	}
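      /* A worked example of the bitfield handling above (values assumed
	 for illustration): for a signed 3-bit field whose CONSTRUCTOR
	 value sits in SImode, bitsize is 3, so the value is shifted left
	 by 32 - 3 = 29 bits and then shifted right by 29, sign-extending
	 the field; an unsigned field is instead masked with
	 (1 << 3) - 1 = 7.  */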
      {
	enum machine_mode mode1;
7362 HOST_WIDE_INT bitsize, bitpos;
	tree offset;
	int volatilep = 0;
	tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
					&mode1, &unsignedp, &volatilep);
	rtx orig_op0;
7369 /* If we got back the original object, something is wrong. Perhaps
7370 we are evaluating an expression too early. In any event, don't
	   infinitely recurse.  */
	if (tem == exp)
	  abort ();
7375 /* If TEM's type is a union of variable size, pass TARGET to the inner
	   computation, since it will need a temporary and TARGET is known
	   to suffice.  This occurs in unchecked conversion in Ada.  */
	orig_op0 = op0
	  = expand_expr (tem,
			 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
			  && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
			      != INTEGER_CST)
			  && modifier != EXPAND_STACK_PARM
			  ? target : NULL_RTX),
			 VOIDmode,
			 (modifier == EXPAND_INITIALIZER
7388 || modifier == EXPAND_CONST_ADDRESS
7389 || modifier == EXPAND_STACK_PARM)
7390 ? modifier : EXPAND_NORMAL);
7392 /* If this is a constant, put it into a register if it is a
7393 legitimate constant and OFFSET is 0 and memory if it isn't. */
7394 if (CONSTANT_P (op0))
	  {
	    enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
	    if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
		&& offset == 0)
	      op0 = force_reg (mode, op0);
	    else
	      op0 = validize_mem (force_const_mem (mode, op0));
	  }

	if (offset != 0)
	  {
	    rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
					  EXPAND_SUM);
7409 /* If this object is in a register, put it into memory.
7410 This case can't occur in C, but can in Ada if we have
7411 unchecked conversion of an expression from a scalar type to
7412 an array or record type. */
7413 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7414 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
	      {
		/* If the operand is a SAVE_EXPR, we can deal with this by
7417 forcing the SAVE_EXPR into memory. */
		if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
		  {
		    put_var_into_stack (TREE_OPERAND (exp, 0),
					/*rescan=*/true);
		    op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
		  }
		else
		  {
		    tree nt
7427 = build_qualified_type (TREE_TYPE (tem),
7428 (TYPE_QUALS (TREE_TYPE (tem))
7429 | TYPE_QUAL_CONST));
7430 rtx memloc = assign_temp (nt, 1, 1, 1);
		    emit_move_insn (memloc, op0);
		    op0 = memloc;
		  }
	      }
	    if (GET_CODE (op0) != MEM)
	      abort ();
7440 #ifdef POINTERS_EXTEND_UNSIGNED
7441 if (GET_MODE (offset_rtx) != Pmode)
7442 offset_rtx = convert_memory_address (Pmode, offset_rtx);
#else
	    if (GET_MODE (offset_rtx) != ptr_mode)
	      offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif
7448 /* A constant address in OP0 can have VOIDmode, we must not try
7449 to call force_reg for that case. Avoid that case. */
7450 if (GET_CODE (op0) == MEM
7451 && GET_MODE (op0) == BLKmode
7452 && GET_MODE (XEXP (op0, 0)) != VOIDmode
		&& bitsize != 0
		&& (bitpos % bitsize) == 0
7455 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7456 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
	      {
		op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
		bitpos = 0;
	      }
7462 op0 = offset_address (op0, offset_rtx,
				  highest_pow2_factor (offset));
	  }
7466 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7467 record its alignment as BIGGEST_ALIGNMENT. */
7468 if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
7469 && is_aligning_offset (offset, tem))
7470 set_mem_align (op0, BIGGEST_ALIGNMENT);
7472 /* Don't forget about volatility even if this is a bitfield. */
7473 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
	  {
	    if (op0 == orig_op0)
7476 op0 = copy_rtx (op0);
	    MEM_VOLATILE_P (op0) = 1;
	  }
	/* The following code doesn't handle CONCAT.
	   Assume only bitpos == 0 can be used for CONCAT, due to
	   one-element arrays having the same mode as their element.  */
7484 if (GET_CODE (op0) == CONCAT)
	    if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
	      abort ();
	    return op0;
	  }
7491 /* In cases where an aligned union has an unaligned object
7492 as a field, we might be extracting a BLKmode value from
7493 an integer-mode (e.g., SImode) object. Handle this case
7494 by doing the extract into an object as wide as the field
7495 (which we know to be the width of a basic mode), then
7496 storing into memory, and changing the mode to BLKmode. */
7497 if (mode1 == VOIDmode
7498 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7499 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7500 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7501 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7502 && modifier != EXPAND_CONST_ADDRESS
7503 && modifier != EXPAND_INITIALIZER)
7504 /* If the field isn't aligned enough to fetch as a memref,
7505 fetch it as a bit field. */
7506 || (mode1 != BLKmode
7507 && SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))
7508 && ((TYPE_ALIGN (TREE_TYPE (tem))
7509 < GET_MODE_ALIGNMENT (mode))
7510 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
7511 /* If the type and the field are a constant size and the
7512 size of the type isn't the same size as the bitfield,
7513 we must use bitfield operations. */
	    || (bitsize >= 0
		&& (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
		    == INTEGER_CST)
		&& 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
					  bitsize)))
	  {
	    enum machine_mode ext_mode = mode;
7522 if (ext_mode == BLKmode
7523 && ! (target != 0 && GET_CODE (op0) == MEM
7524 && GET_CODE (target) == MEM
7525 && bitpos % BITS_PER_UNIT == 0))
7526 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7528 if (ext_mode == BLKmode)
	      {
		/* In this case, BITPOS must start at a byte boundary and
7531 TARGET, if specified, must be a MEM. */
7532 if (GET_CODE (op0) != MEM
7533 || (target != 0 && GET_CODE (target) != MEM)
		    || bitpos % BITS_PER_UNIT != 0)
		  abort ();

		op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
		if (target == 0)
		  target = assign_temp (type, 0, 1, 1);
7541 emit_block_move (target, op0,
				 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
					  / BITS_PER_UNIT),
7544 (modifier == EXPAND_STACK_PARM
7545 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7550 op0 = validize_mem (op0);
7552 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7553 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7555 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7556 (modifier == EXPAND_STACK_PARM
7557 ? NULL_RTX : target),
				     ext_mode, ext_mode,
				     int_size_in_bytes (TREE_TYPE (tem)));
7561 /* If the result is a record type and BITSIZE is narrower than
7562 the mode of OP0, an integral mode, and this is a big endian
7563 machine, we must put the field into the high-order bits. */
7564 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7565 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7566 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7567 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
				  size_int (GET_MODE_BITSIZE (GET_MODE (op0))
					    - bitsize),
				  op0, 1);
7572 if (mode == BLKmode)
7574 rtx new = assign_temp (build_qualified_type
7575 ((*lang_hooks.types.type_for_mode)
					(ext_mode, 0),
					TYPE_QUAL_CONST), 0, 1, 1);
7579 emit_move_insn (new, op0);
7580 op0 = copy_rtx (new);
7581 PUT_MODE (op0, BLKmode);
		set_mem_attributes (op0, exp, 1);
	      }

	    return op0;
	  }

	/* If the result is BLKmode, use that to access the object
	   now as well.  */
	if (mode == BLKmode)
	  mode1 = BLKmode;
7593 /* Get a reference to just this component. */
7594 if (modifier == EXPAND_CONST_ADDRESS
7595 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7596 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
	else
	  op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7600 if (op0 == orig_op0)
7601 op0 = copy_rtx (op0);
7603 set_mem_attributes (op0, exp, 0);
7604 if (GET_CODE (XEXP (op0, 0)) == REG)
7605 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7607 MEM_VOLATILE_P (op0) |= volatilep;
7608 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7609 || modifier == EXPAND_CONST_ADDRESS
7610 || modifier == EXPAND_INITIALIZER)
	  return op0;
	else if (target == 0)
7613 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
	convert_move (target, op0, unsignedp);
	return target;
      }

    case VTABLE_REF:
      {
	rtx insn, before = get_last_insn (), vtbl_ref;
7623 /* Evaluate the interior expression. */
7624 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7627 /* Get or create an instruction off which to hang a note. */
7628 if (REG_P (subtarget))
	  {
	    target = subtarget;
	    insn = get_last_insn ();
	    if (insn == before)
	      abort ();
	    if (! INSN_P (insn))
	      insn = prev_nonnote_insn (insn);
	  }
	else
	  {
	    target = gen_reg_rtx (GET_MODE (subtarget));
	    insn = emit_move_insn (target, subtarget);
	  }
7643 /* Collect the data for the note. */
7644 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7645 vtbl_ref = plus_constant (vtbl_ref,
7646 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7647 /* Discard the initial CONST that was added. */
7648 vtbl_ref = XEXP (vtbl_ref, 0);
	REG_NOTES (insn)
	  = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));

	return target;
      }
7656 /* Intended for a reference to a buffer of a file-object in Pascal.
7657 But it's not certain that a special tree code will really be
	 necessary for these.  INDIRECT_REF might work for them.  */
    case BUFFER_REF:
      abort ();

    case IN_EXPR:
      {
7664 /* Pascal set IN expression.
	   Algorithm:
	       rlo       = set_low - (set_low%bits_per_word);
7668 the_word = set [ (index - rlo)/bits_per_word ];
7669 bit_index = index % bits_per_word;
7670 bitmask = 1 << bit_index;
7671 return !!(the_word & bitmask); */
7673 tree set = TREE_OPERAND (exp, 0);
7674 tree index = TREE_OPERAND (exp, 1);
7675 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7676 tree set_type = TREE_TYPE (set);
7677 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7678 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7679 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7680 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7681 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7682 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7683 rtx setaddr = XEXP (setval, 0);
7684 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
	rtx rlow;
	rtx diff, quo, rem, addr, bit, result;
7688 /* If domain is empty, answer is no. Likewise if index is constant
7689 and out of bounds. */
7690 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7691 && TREE_CODE (set_low_bound) == INTEGER_CST
7692 && tree_int_cst_lt (set_high_bound, set_low_bound))
7693 || (TREE_CODE (index) == INTEGER_CST
7694 && TREE_CODE (set_low_bound) == INTEGER_CST
7695 && tree_int_cst_lt (index, set_low_bound))
7696 || (TREE_CODE (set_high_bound) == INTEGER_CST
7697 && TREE_CODE (index) == INTEGER_CST
7698 && tree_int_cst_lt (set_high_bound, index))))
	  return const0_rtx;

	if (target == 0)
	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7704 /* If we get here, we have to generate the code for both cases
7705 (in range and out of range). */
7707 op0 = gen_label_rtx ();
7708 op1 = gen_label_rtx ();
7710 if (! (GET_CODE (index_val) == CONST_INT
7711 && GET_CODE (lo_r) == CONST_INT))
7712 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7713 GET_MODE (index_val), iunsignedp, op1);
7715 if (! (GET_CODE (index_val) == CONST_INT
7716 && GET_CODE (hi_r) == CONST_INT))
7717 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7718 GET_MODE (index_val), iunsignedp, op1);
	/* Calculate the element number of bit zero in the first word
	   of the set.  */
7722 if (GET_CODE (lo_r) == CONST_INT)
7723 rlow = GEN_INT (INTVAL (lo_r)
7724 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7726 rlow = expand_binop (index_mode, and_optab, lo_r,
7727 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7728 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7730 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7731 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7733 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7734 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7735 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7736 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7738 addr = memory_address (byte_mode,
7739 expand_binop (index_mode, add_optab, diff,
7740 setaddr, NULL_RTX, iunsignedp,
7743 /* Extract the bit we want to examine. */
7744 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7745 gen_rtx_MEM (byte_mode, addr),
			    make_tree (TREE_TYPE (index), rem),
			    NULL_RTX, 1);
7748 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7749 GET_MODE (target) == byte_mode ? target : 0,
7750 1, OPTAB_LIB_WIDEN);
7752 if (result != target)
7753 convert_move (target, result, 1);
7755 /* Output the code to handle the out-of-range case. */
	emit_jump (op0);
	emit_label (op1);
	emit_move_insn (target, const0_rtx);
	emit_label (op0);
	return target;
      }
7763 case WITH_CLEANUP_EXPR:
7764 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
	{
	  WITH_CLEANUP_EXPR_RTL (exp)
7767 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7768 expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
7769 CLEANUP_EH_ONLY (exp));
7771 /* That's it for this cleanup. */
	  TREE_OPERAND (exp, 1) = 0;
	}
      return WITH_CLEANUP_EXPR_RTL (exp);
7776 case CLEANUP_POINT_EXPR:
      {
	/* Start a new binding layer that will keep track of all cleanup
7779 actions to be performed. */
7780 expand_start_bindings (2);
7782 target_temp_slot_level = temp_slot_level;
7784 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7785 /* If we're going to use this value, load it up now. */
	if (! ignore)
	  op0 = force_not_mem (op0);
7788 preserve_temp_slots (op0);
	expand_end_bindings (NULL_TREE, 0, 0);
      }
      return op0;

    case CALL_EXPR:
7794 /* Check for a built-in function. */
7795 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
	      == FUNCTION_DECL)
7798 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7800 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7801 == BUILT_IN_FRONTEND)
	    return (*lang_hooks.expand_expr) (exp, original_target,
					      tmode, modifier);
	  else
	    return expand_builtin (exp, target, subtarget, tmode, ignore);
	}
7808 return expand_call (exp, target, ignore);
    case NON_LVALUE_EXPR:
    case NOP_EXPR:
    case CONVERT_EXPR:
7813 case REFERENCE_EXPR:
      if (TREE_OPERAND (exp, 0) == error_mark_node)
	return const0_rtx;
7817 if (TREE_CODE (type) == UNION_TYPE)
	{
	  tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7821 /* If both input and output are BLKmode, this conversion isn't doing
7822 anything except possibly changing memory attribute. */
7823 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
	    {
	      rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
					modifier);
7828 result = copy_rtx (result);
7829 set_mem_attributes (result, exp, 0);
	      return result;
	    }

	  if (target == 0)
	    target = assign_temp (type, 0, 1, 1);
7836 if (GET_CODE (target) == MEM)
7837 /* Store data into beginning of memory target. */
7838 store_expr (TREE_OPERAND (exp, 0),
7839 adjust_address (target, TYPE_MODE (valtype), 0),
7840 modifier == EXPAND_STACK_PARM ? 2 : 0);
7842 else if (GET_CODE (target) == REG)
7843 /* Store this field into a union of the proper type. */
7844 store_field (target,
7845 MIN ((int_size_in_bytes (TREE_TYPE
7846 (TREE_OPERAND (exp, 0)))
7848 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7849 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7850 VOIDmode, 0, type, 0);
	  else
	    abort ();

	  /* Return the entire union.  */
	  return target;
	}
7858 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	  op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
			     modifier);
7863 /* If the signedness of the conversion differs and OP0 is
7864 a promoted SUBREG, clear that indication since we now
7865 have to do the proper extension. */
7866 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7867 && GET_CODE (op0) == SUBREG)
	    SUBREG_PROMOTED_VAR_P (op0) = 0;

	  return op0;
	}
7873 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
      if (GET_MODE (op0) == mode)
	return op0;
7877 /* If OP0 is a constant, just convert it into the proper mode. */
7878 if (CONSTANT_P (op0))
7880 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7881 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7883 if (modifier == EXPAND_INITIALIZER)
7884 return simplify_gen_subreg (mode, op0, inner_mode,
					      subreg_lowpart_offset (mode,
								     inner_mode));
	    else
7888 return convert_modes (mode, inner_mode, op0,
7889 TREE_UNSIGNED (inner_type));
7892 if (modifier == EXPAND_INITIALIZER)
7893 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
      if (target == 0)
	return
	  convert_to_mode (mode, op0,
7898 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      else
	convert_move (target, op0,
		      TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;
7904 case VIEW_CONVERT_EXPR:
7905 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7907 /* If the input and output modes are both the same, we are done.
7908 Otherwise, if neither mode is BLKmode and both are within a word, we
7909 can use gen_lowpart. If neither is true, make sure the operand is
7910 in memory and convert the MEM to the new mode. */
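      /* A concrete case (modes assumed for illustration): viewing a
	 32-bit float as a 32-bit integer (SFmode as SImode) satisfies
	 the both-within-a-word test below, so gen_lowpart suffices; a
	 BLKmode view, by contrast, must go through memory.  */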
7911 if (TYPE_MODE (type) == GET_MODE (op0))
7913 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7914 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7915 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7916 op0 = gen_lowpart (TYPE_MODE (type), op0);
7917 else if (GET_CODE (op0) != MEM)
	{
	  /* If the operand is not a MEM, force it into memory.  Since we
	     are going to be changing the mode of the MEM, don't call
	     force_const_mem for constants because we don't allow pool
	     constants to change mode.  */
	  tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
	  if (TREE_ADDRESSABLE (exp))
	    abort ();
7928 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
	    target
	      = assign_stack_temp_for_type
7931 (TYPE_MODE (inner_type),
7932 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
	  emit_move_insn (target, op0);
	  op0 = target;
	}
7938 /* At this point, OP0 is in the correct mode. If the output type is such
7939 that the operand is known to be aligned, indicate that it is.
	 Otherwise, we need only be concerned about alignment for non-BLKmode
	 results.  */
7942 if (GET_CODE (op0) == MEM)
	{
	  op0 = copy_rtx (op0);
7946 if (TYPE_ALIGN_OK (type))
7947 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7948 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7949 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7951 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7952 HOST_WIDE_INT temp_size
7953 = MAX (int_size_in_bytes (inner_type),
7954 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7955 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7956 temp_size, 0, type);
7957 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
	      if (TREE_ADDRESSABLE (exp))
		abort ();
7962 if (GET_MODE (op0) == BLKmode)
7963 emit_block_move (new_with_op0_mode, op0,
7964 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7965 (modifier == EXPAND_STACK_PARM
7966 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
	      else
		emit_move_insn (new_with_op0_mode, op0);

	      op0 = new;
	    }

	  op0 = adjust_address (op0, TYPE_MODE (type), 0);
	}

      return op0;

    case PLUS_EXPR:
7979 this_optab = ! unsignedp && flag_trapv
7980 && (GET_MODE_CLASS (mode) == MODE_INT)
7981 ? addv_optab : add_optab;
7983 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7984 something else, make sure we add the register to the constant and
7985 then to the other thing. This case can occur during strength
7986 reduction and doing it this way will produce better code if the
7987 frame pointer or argument pointer is eliminated.
7989 fold-const.c will ensure that the constant is always in the inner
7990 PLUS_EXPR, so the only case we need to do anything about is if
7991 sp, ap, or fp is our second argument, in which case we must swap
7992 the innermost first argument and our second argument. */
7994 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7995 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7996 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7997 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7998 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7999 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
	{
	  tree t = TREE_OPERAND (exp, 1);

	  TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
	  TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
	}
8007 /* If the result is to be ptr_mode and we are adding an integer to
8008 something, we might be forming a constant. So try to use
8009 plus_constant. If it produces a sum and we can't accept it,
8010 use force_operand. This allows P = &ARR[const] to generate
8011 efficient code on machines where a SYMBOL_REF is not a valid
8014 If this is an EXPAND_SUM call, always return the sum. */
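      /* For instance (a hypothetical source expression): expanding
	 P = &ARR[5] with 4-byte elements can fold the whole address to
	 (plus (symbol_ref ARR) (const_int 20)) via plus_constant, which
	 matters on machines where a bare SYMBOL_REF is not a valid
	 memory address.  */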
8015 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8016 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
	{
	  if (modifier == EXPAND_STACK_PARM)
	    target = 0;
8020 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
8021 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8022 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
	    {
	      rtx constant_part;

	      op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
				 EXPAND_SUM);
8028 /* Use immed_double_const to ensure that the constant is
8029 truncated according to the mode of OP1, then sign extended
8030 to a HOST_WIDE_INT. Using the constant directly can result
8031 in non-canonical RTL in a 64x32 cross compile. */
	      constant_part
		= immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
				      (HOST_WIDE_INT) 0,
				      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
8036 op1 = plus_constant (op1, INTVAL (constant_part));
8037 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		op1 = force_operand (op1, target);
	      return op1;
	    }
8042 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8043 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
8044 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
	    {
	      rtx constant_part;

	      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8049 (modifier == EXPAND_INITIALIZER
8050 ? EXPAND_INITIALIZER : EXPAND_SUM));
8051 if (! CONSTANT_P (op0))
		{
		  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8054 VOIDmode, modifier);
8055 /* Don't go to both_summands if modifier
8056 says it's not right to return a PLUS. */
		  if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		    goto binop2;
		  goto both_summands;
		}
8061 /* Use immed_double_const to ensure that the constant is
8062 truncated according to the mode of OP1, then sign extended
8063 to a HOST_WIDE_INT. Using the constant directly can result
8064 in non-canonical RTL in a 64x32 cross compile. */
	      constant_part
		= immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
				      (HOST_WIDE_INT) 0,
				      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
8069 op0 = plus_constant (op0, INTVAL (constant_part));
8070 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		op0 = force_operand (op0, target);
	      return op0;
	    }
	}

      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;
8079 /* No sense saving up arithmetic to be done
8080 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
8083 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8084 || mode != ptr_mode)
	{
	  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
	  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
	  if (op0 == const0_rtx)
	    return op1;
	  if (op1 == const0_rtx)
	    return op0;
	  goto binop2;
	}
8095 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
8096 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
      /* We come here from MINUS_EXPR when the second operand is a
	 constant.  */
    both_summands:
8101 /* Make sure any term that's a sum with a constant comes last. */
8102 if (GET_CODE (op0) == PLUS
	  && CONSTANT_P (XEXP (op0, 1)))
	{
	  temp = op0;
	  op0 = op1;
	  op1 = temp;
	}
8109 /* If adding to a sum including a constant,
8110 associate it to put the constant outside. */
8111 if (GET_CODE (op1) == PLUS
	  && CONSTANT_P (XEXP (op1, 1)))
	{
8114 rtx constant_term = const0_rtx;
	  temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
	  if (temp != 0)
	    op0 = temp;
8119 /* Ensure that MULT comes first if there is one. */
8120 else if (GET_CODE (op0) == MULT)
8121 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
	  else
	    op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
8125 /* Let's also eliminate constants from op0 if possible. */
8126 op0 = eliminate_constant_term (op0, &constant_term);
8128 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
8129 their sum should be a constant. Form it into OP1, since the
8130 result we want will then be OP0 + OP1. */
	  temp = simplify_binary_operation (PLUS, mode, constant_term,
					    XEXP (op1, 1));
	  if (temp != 0)
	    op1 = temp;
	  else
	    op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
	}
8140 /* Put a constant term last and put a multiplication first. */
8141 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
8142 temp = op1, op1 = op0, op0 = temp;
8144 temp = simplify_binary_operation (PLUS, mode, op0, op1);
8145 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
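      /* To illustrate the canonicalization above (operands assumed):
	 adding (plus (reg) (const_int 4)) and (const_int 8) first swaps
	 so the constant-bearing sum comes second, then folds the two
	 constants, yielding (plus (reg) (const_int 12)) rather than a
	 nested PLUS.  */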
    case MINUS_EXPR:
      /* For initializers, we are allowed to return a MINUS of two
	 symbolic constants.  Here we handle all cases when both operands
	 are constant.  */
8151 /* Handle difference of two symbolic constants,
8152 for the sake of an initializer. */
8153 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8154 && really_constant_p (TREE_OPERAND (exp, 0))
8155 && really_constant_p (TREE_OPERAND (exp, 1)))
	  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
				 modifier);
	  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
				 modifier);
8162 /* If the last operand is a CONST_INT, use plus_constant of
8163 the negated constant. Else make the MINUS. */
8164 if (GET_CODE (op1) == CONST_INT)
8165 return plus_constant (op0, - INTVAL (op1));
	  else
	    return gen_rtx_MINUS (mode, op0, op1);
	}
8170 this_optab = ! unsignedp && flag_trapv
8171 && (GET_MODE_CLASS(mode) == MODE_INT)
8172 ? subv_optab : sub_optab;
8174 /* No sense saving up arithmetic to be done
8175 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
8178 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
	  || mode != ptr_mode)
	goto binop;
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;
8185 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
8186 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
8188 /* Convert A - const to A + (-const). */
8189 if (GET_CODE (op1) == CONST_INT)
	{
	  op1 = negate_rtx (mode, op1);
	  goto both_summands;
	}

      goto binop2;

    case MULT_EXPR:
8198 /* If first operand is constant, swap them.
8199 Thus the following special case checks need only
8200 check the second operand. */
8201 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
8203 tree t1 = TREE_OPERAND (exp, 0);
8204 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
8205 TREE_OPERAND (exp, 1) = t1;
8208 /* Attempt to return something suitable for generating an
8209 indexed address, for machines that support that. */
8211 if (modifier == EXPAND_SUM && mode == ptr_mode
8212 && host_integerp (TREE_OPERAND (exp, 1), 0))
	{
	  tree exp1 = TREE_OPERAND (exp, 1);

	  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
			     EXPAND_SUM);
8219 /* If we knew for certain that this is arithmetic for an array
8220 reference, and we knew the bounds of the array, then we could
8221 apply the distributive law across (PLUS X C) for constant C.
8222 Without such knowledge, we risk overflowing the computation
8223 when both X and C are large, but X+C isn't. */
8224 /* ??? Could perhaps special-case EXP being unsigned and C being
8225 positive. In that case we are certain that X+C is no smaller
8226 than X and so the transformed expression will overflow iff the
8227 original would have. */
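      /* E.g. (values assumed for illustration): with X = 0x7ffffffe and
	 C = 4 in SImode, (X + C) * 2 wraps while X*2 + C*2 need not wrap
	 the same way, so distributing the multiplication could change
	 the result.  */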
8229 if (GET_CODE (op0) != REG)
8230 op0 = force_operand (op0, NULL_RTX);
8231 if (GET_CODE (op0) != REG)
8232 op0 = copy_to_mode_reg (mode, op0);
8234 return gen_rtx_MULT (mode, op0,
8235 gen_int_mode (tree_low_cst (exp1, 0),
8236 TYPE_MODE (TREE_TYPE (exp1))));
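/* Illustration (added note): the worry above is that rewriting
   (x + c) * 4 as x*4 + c*4 can wrap when x*4 overflows even though
   (x + c) * 4 would not; lacking range information we therefore
   multiply the whole sum rather than distributing.  */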
8239 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8242 if (modifier == EXPAND_STACK_PARM)
8245 /* Check for multiplying things that have been extended
8246 from a narrower type. If this machine supports multiplying
8247 in that narrower type with a result in the desired type,
8248 do it that way, and avoid the explicit type-conversion. */
8249 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
8250 && TREE_CODE (type) == INTEGER_TYPE
8251 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8252 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8253 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8254 && int_fits_type_p (TREE_OPERAND (exp, 1),
8255 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8256 /* Don't use a widening multiply if a shift will do. */
8257 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8258 > HOST_BITS_PER_WIDE_INT)
8259 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
8261 || (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8262 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8264 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
8265 /* If both operands are extended, they must either both
8266 be zero-extended or both be sign-extended. */
8267 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8269 == TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))
8271 enum machine_mode innermode
8272 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
8273 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8274 ? smul_widen_optab : umul_widen_optab);
8275 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8276 ? umul_widen_optab : smul_widen_optab);
8277 if (mode == GET_MODE_WIDER_MODE (innermode))
8279 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
8281 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8282 NULL_RTX, VOIDmode, 0);
8283 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8284 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8287 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8288 NULL_RTX, VOIDmode, 0);
8291 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
8292 && innermode == word_mode)
8295 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8296 NULL_RTX, VOIDmode, 0);
8297 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8298 op1 = convert_modes (innermode, mode,
8299 expand_expr (TREE_OPERAND (exp, 1),
8300 NULL_RTX, VOIDmode, 0),
8303 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8304 NULL_RTX, VOIDmode, 0);
8305 temp = expand_binop (mode, other_optab, op0, op1, target,
8306 unsignedp, OPTAB_LIB_WIDEN);
8307 htem = expand_mult_highpart_adjust (innermode,
8308 gen_highpart (innermode, temp),
8310 gen_highpart (innermode, temp),
8312 emit_move_insn (gen_highpart (innermode, temp), htem);
8317 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8318 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8319 return expand_mult (mode, op0, op1, target, unsignedp);
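/* Illustration (added note): for

       short a, b;  int p = (int) a * (int) b;

   both operands are extensions from HImode, so on a machine with a
   widening-multiply pattern (e.g. mulhisi3) we multiply the narrow
   values directly into the wide result instead of extending each
   operand first.  */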
8321 case TRUNC_DIV_EXPR:
8322 case FLOOR_DIV_EXPR:
8324 case ROUND_DIV_EXPR:
8325 case EXACT_DIV_EXPR:
8326 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8328 if (modifier == EXPAND_STACK_PARM)
8330 /* Possible optimization: compute the dividend with EXPAND_SUM
8331 then if the divisor is constant can optimize the case
8332 where some terms of the dividend have coeffs divisible by it. */
8333 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8334 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8335 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8338 /* Emit a/b as a*(1/b). Later we may manage to CSE the reciprocal,
8339 saving an expensive divide. If not, combine will rebuild the original
8340 computation. */
8341 if (flag_unsafe_math_optimizations && optimize && !optimize_size
8342 && TREE_CODE (type) == REAL_TYPE
8343 && !real_onep (TREE_OPERAND (exp, 0)))
8344 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
8345 build (RDIV_EXPR, type,
8346 build_real (type, dconst1),
8347 TREE_OPERAND (exp, 1))),
8348 target, tmode, modifier);
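/* Illustration (added note): with -funsafe-math-optimizations,
   x[i] / d inside a loop becomes x[i] * (1.0 / d); when d is
   loop-invariant the reciprocal can be CSEd or hoisted, trading a
   divide per iteration for a multiply.  */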
8349 this_optab = sdiv_optab;
8352 case TRUNC_MOD_EXPR:
8353 case FLOOR_MOD_EXPR:
8355 case ROUND_MOD_EXPR:
8356 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8358 if (modifier == EXPAND_STACK_PARM)
8360 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8361 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8362 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8364 case FIX_ROUND_EXPR:
8365 case FIX_FLOOR_EXPR:
8367 abort (); /* Not used for C. */
8369 case FIX_TRUNC_EXPR:
8370 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8371 if (target == 0 || modifier == EXPAND_STACK_PARM)
8372 target = gen_reg_rtx (mode);
8373 expand_fix (target, op0, unsignedp);
8377 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8378 if (target == 0 || modifier == EXPAND_STACK_PARM)
8379 target = gen_reg_rtx (mode);
8380 /* expand_float can't figure out what to do if FROM has VOIDmode.
8381 So give it the correct mode. With -O, cse will optimize this. */
8382 if (GET_MODE (op0) == VOIDmode)
8383 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8385 expand_float (target, op0,
8386 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
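/* Illustration (added note): expanding (double) 5 yields
   op0 = (const_int 5), which has VOIDmode; copying it into a
   register of the operand type's mode tells expand_float what
   width it is converting from.  */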
8390 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8391 if (modifier == EXPAND_STACK_PARM)
8393 temp = expand_unop (mode,
8394 ! unsignedp && flag_trapv
8395 && (GET_MODE_CLASS(mode) == MODE_INT)
8396 ? negv_optab : neg_optab, op0, target, 0);
8402 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8403 if (modifier == EXPAND_STACK_PARM)
8406 /* Handle complex values specially. */
8407 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
8408 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
8409 return expand_complex_abs (mode, op0, target, unsignedp);
8411 /* Unsigned abs is simply the operand. Testing here means we don't
8412 risk generating incorrect code below. */
8413 if (TREE_UNSIGNED (type))
8416 return expand_abs (mode, op0, target, unsignedp,
8417 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8421 target = original_target;
8422 if (target == 0
8423 || modifier == EXPAND_STACK_PARM
8424 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
8425 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
8426 || GET_MODE (target) != mode
8427 || (GET_CODE (target) == REG
8428 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8429 target = gen_reg_rtx (mode);
8430 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8431 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8433 /* First try to do it with a special MIN or MAX instruction.
8434 If that does not win, use a conditional jump to select the proper
8435 value. */
8436 this_optab = (TREE_UNSIGNED (type)
8437 ? (code == MIN_EXPR ? umin_optab : umax_optab)
8438 : (code == MIN_EXPR ? smin_optab : smax_optab));
8440 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8445 /* At this point, a MEM target is no longer useful; we will get better
8446 code without it. */
8448 if (GET_CODE (target) == MEM)
8449 target = gen_reg_rtx (mode);
8452 emit_move_insn (target, op0);
8454 op0 = gen_label_rtx ();
8456 /* If this mode is an integer too wide to compare properly,
8457 compare word by word. Rely on cse to optimize constant cases. */
8458 if (GET_MODE_CLASS (mode) == MODE_INT
8459 && ! can_compare_p (GE, mode, ccp_jump))
8461 if (code == MAX_EXPR)
8462 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8463 target, op1, NULL_RTX, op0);
8465 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8466 op1, target, NULL_RTX, op0);
8470 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
8471 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
8472 unsignedp, mode, NULL_RTX, NULL_RTX,
8475 emit_move_insn (target, op1);
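/* The fallback above amounts to:

       target = op0;
       if (target >= op1)	-- GE for MAX, LE for MIN
	 goto done;
       target = op1;
     done:

   using a word-by-word comparison when the mode is too wide to
   compare directly.  */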
8480 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8481 if (modifier == EXPAND_STACK_PARM)
8483 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8489 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8490 if (modifier == EXPAND_STACK_PARM)
8492 temp = expand_unop (mode, ffs_optab, op0, target, 1);
8498 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8499 temp = expand_unop (mode, clz_optab, op0, target, 1);
8505 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8506 temp = expand_unop (mode, ctz_optab, op0, target, 1);
8512 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8513 temp = expand_unop (mode, popcount_optab, op0, target, 1);
8519 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8520 temp = expand_unop (mode, parity_optab, op0, target, 1);
8525 /* ??? Can optimize bitwise operations with one arg constant.
8526 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8527 and (a bitwise1 b) bitwise2 b (etc)
8528 but that is probably not worth while. */
8530 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8531 boolean values when we want in all cases to compute both of them. In
8532 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8533 as actual zero-or-1 values and then bitwise anding. In cases where
8534 there cannot be any side effects, better code would be made by
8535 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8536 how to recognize those cases. */
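/* Illustration (added note): C's `a && b' is TRUTH_ANDIF_EXPR, since
   b must not be evaluated when a is false; TRUTH_AND_EXPR is used
   when both operands are 0/1 values that may always be evaluated
   (e.g. two comparison results), where a plain bitwise AND is the
   cheapest expansion.  */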
8538 case TRUTH_AND_EXPR:
8540 this_optab = and_optab;
8545 this_optab = ior_optab;
8548 case TRUTH_XOR_EXPR:
8550 this_optab = xor_optab;
8557 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8559 if (modifier == EXPAND_STACK_PARM)
8561 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8562 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8565 /* Could determine the answer when only additive constants differ. Also,
8566 the addition of one can be handled by changing the condition. */
8573 case UNORDERED_EXPR:
8580 temp = do_store_flag (exp,
8581 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8582 tmode != VOIDmode ? tmode : mode, 0);
8586 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8587 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8588 && original_target
8589 && GET_CODE (original_target) == REG
8590 && (GET_MODE (original_target)
8591 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8593 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8596 /* If temp is constant, we can just compute the result. */
8597 if (GET_CODE (temp) == CONST_INT)
8599 if (INTVAL (temp) != 0)
8600 emit_move_insn (target, const1_rtx);
8602 emit_move_insn (target, const0_rtx);
8607 if (temp != original_target)
8609 enum machine_mode mode1 = GET_MODE (temp);
8610 if (mode1 == VOIDmode)
8611 mode1 = tmode != VOIDmode ? tmode : mode;
8613 temp = copy_to_mode_reg (mode1, temp);
8616 op1 = gen_label_rtx ();
8617 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8618 GET_MODE (temp), unsignedp, op1);
8619 emit_move_insn (temp, const1_rtx);
8624 /* If no set-flag instruction, must generate a conditional
8625 store into a temporary variable. Drop through
8626 and handle this like && and ||. */
8628 case TRUTH_ANDIF_EXPR:
8629 case TRUTH_ORIF_EXPR:
8630 if (! ignore
8631 && (target == 0
8632 || modifier == EXPAND_STACK_PARM
8633 || ! safe_from_p (target, exp, 1)
8634 /* Make sure we don't have a hard reg (such as function's return
8635 value) live across basic blocks, if not optimizing. */
8636 || (!optimize && GET_CODE (target) == REG
8637 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8638 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8641 emit_clr_insn (target);
8643 op1 = gen_label_rtx ();
8644 jumpifnot (exp, op1);
8647 emit_0_to_1_insn (target);
8650 return ignore ? const0_rtx : target;
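/* The sequence just emitted is:

       target = 0;
       if (!<exp>) goto L;
       target = 1;
     L:

   i.e. a conditional store of the truth value for machines without
   a usable set-flag instruction.  */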
8652 case TRUTH_NOT_EXPR:
8653 if (modifier == EXPAND_STACK_PARM)
8655 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8656 /* The parser is careful to generate TRUTH_NOT_EXPR
8657 only with operands that are always zero or one. */
8658 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8659 target, 1, OPTAB_LIB_WIDEN);
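/* Because the operand is known to be 0 or 1, logical negation is
   simply op0 ^ 1; no comparison against zero is needed.  */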
8665 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8667 return expand_expr (TREE_OPERAND (exp, 1),
8668 (ignore ? const0_rtx : target),
8669 VOIDmode, modifier);
8672 /* If we would have a "singleton" (see below) were it not for a
8673 conversion in each arm, bring that conversion back out. */
8674 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8675 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8676 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8677 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8679 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8680 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8682 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8683 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8684 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8685 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8686 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8687 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8688 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8689 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8690 return expand_expr (build1 (NOP_EXPR, type,
8691 build (COND_EXPR, TREE_TYPE (iftrue),
8692 TREE_OPERAND (exp, 0),
8694 target, tmode, modifier);
8698 /* Note that COND_EXPRs whose type is a structure or union
8699 are required to be constructed to contain assignments of
8700 a temporary variable, so that we can evaluate them here
8701 for side effect only. If type is void, we must do likewise. */
8703 /* If an arm of the branch requires a cleanup,
8704 only that cleanup is performed. */
8707 tree binary_op = 0, unary_op = 0;
8709 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8710 convert it to our mode, if necessary. */
8711 if (integer_onep (TREE_OPERAND (exp, 1))
8712 && integer_zerop (TREE_OPERAND (exp, 2))
8713 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8717 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8722 if (modifier == EXPAND_STACK_PARM)
8724 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8725 if (GET_MODE (op0) == mode)
8729 target = gen_reg_rtx (mode);
8730 convert_move (target, op0, unsignedp);
8734 /* Check for X ? A + B : A. If we have this, we can copy A to the
8735 output and conditionally add B. Similarly for unary operations.
8736 Don't do this if X has side-effects because those side effects
8737 might affect A or B and the "?" operation is a sequence point in
8738 ANSI. (operand_equal_p tests for side effects.) */
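/* Illustration (added note): in x ? a + 4 : a, the false arm `a'
   equals the first operand of the PLUS in the true arm, so `a' is
   the "singleton": it can be computed unconditionally, and only
   the addition of 4 made conditional.  */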
8740 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8741 && operand_equal_p (TREE_OPERAND (exp, 2),
8742 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8743 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8744 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8745 && operand_equal_p (TREE_OPERAND (exp, 1),
8746 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8747 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8748 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8749 && operand_equal_p (TREE_OPERAND (exp, 2),
8750 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8751 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8752 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8753 && operand_equal_p (TREE_OPERAND (exp, 1),
8754 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8755 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8757 /* If we are not to produce a result, we have no target. Otherwise,
8758 if a target was specified use it; it will not be used as an
8759 intermediate target unless it is safe. If no target, use a
8760 temporary. */
8764 else if (modifier == EXPAND_STACK_PARM)
8765 temp = assign_temp (type, 0, 0, 1);
8766 else if (original_target
8767 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8768 || (singleton && GET_CODE (original_target) == REG
8769 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8770 && original_target == var_rtx (singleton)))
8771 && GET_MODE (original_target) == mode
8772 #ifdef HAVE_conditional_move
8773 && (! can_conditionally_move_p (mode)
8774 || GET_CODE (original_target) == REG
8775 || TREE_ADDRESSABLE (type))
8776 #endif
8777 && (GET_CODE (original_target) != MEM
8778 || TREE_ADDRESSABLE (type)))
8779 temp = original_target;
8780 else if (TREE_ADDRESSABLE (type))
8783 temp = assign_temp (type, 0, 0, 1);
8785 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8786 do the test of X as a store-flag operation, do this as
8787 A + ((X != 0) << log C). Similarly for other simple binary
8788 operators. Only do for C == 1 if BRANCH_COST is low. */
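/* Illustration (added note): x ? a + 8 : a becomes
   a + ((x != 0) << 3); the 0-or-1 store-flag result is shifted into
   the position of the power-of-2 constant and no branch is emitted
   at all.  */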
8789 if (temp && singleton && binary_op
8790 && (TREE_CODE (binary_op) == PLUS_EXPR
8791 || TREE_CODE (binary_op) == MINUS_EXPR
8792 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8793 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8794 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8795 : integer_onep (TREE_OPERAND (binary_op, 1)))
8796 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8800 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8801 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8802 ? addv_optab : add_optab)
8803 : TREE_CODE (binary_op) == MINUS_EXPR
8804 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8805 ? subv_optab : sub_optab)
8806 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8809 /* If we had X ? A : A + 1, do this as A + (X == 0). */
8810 if (singleton == TREE_OPERAND (exp, 1))
8811 cond = invert_truthvalue (TREE_OPERAND (exp, 0));
8813 cond = TREE_OPERAND (exp, 0);
8815 result = do_store_flag (cond, (safe_from_p (temp, singleton, 1)
8817 mode, BRANCH_COST <= 1);
8819 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8820 result = expand_shift (LSHIFT_EXPR, mode, result,
8821 build_int_2 (tree_log2
8822 (TREE_OPERAND (binary_op, 1)),
8823 0),
8825 (safe_from_p (temp, singleton, 1)
8826 ? temp : NULL_RTX), 0);
8830 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8831 return expand_binop (mode, boptab, op1, result, temp,
8832 unsignedp, OPTAB_LIB_WIDEN);
8836 do_pending_stack_adjust ();
8838 op0 = gen_label_rtx ();
8840 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8844 /* If the target conflicts with the other operand of the
8845 binary op, we can't use it. Also, we can't use the target
8846 if it is a hard register, because evaluating the condition
8847 might clobber it. */
8849 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8850 || (GET_CODE (temp) == REG
8851 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8852 temp = gen_reg_rtx (mode);
8853 store_expr (singleton, temp,
8854 modifier == EXPAND_STACK_PARM ? 2 : 0);
8857 expand_expr (singleton,
8858 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8859 if (singleton == TREE_OPERAND (exp, 1))
8860 jumpif (TREE_OPERAND (exp, 0), op0);
8862 jumpifnot (TREE_OPERAND (exp, 0), op0);
8864 start_cleanup_deferral ();
8865 if (binary_op && temp == 0)
8866 /* Just touch the other operand. */
8867 expand_expr (TREE_OPERAND (binary_op, 1),
8868 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8870 store_expr (build (TREE_CODE (binary_op), type,
8871 make_tree (type, temp),
8872 TREE_OPERAND (binary_op, 1)),
8873 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8875 store_expr (build1 (TREE_CODE (unary_op), type,
8876 make_tree (type, temp)),
8877 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8880 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8881 comparison operator. If we have one of these cases, set the
8882 output to A, branch on A (cse will merge these two references),
8883 then set the output to FOO. */
8884 else if (temp
8885 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8886 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8887 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8888 TREE_OPERAND (exp, 1), 0)
8889 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8890 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8891 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8893 if (GET_CODE (temp) == REG
8894 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8895 temp = gen_reg_rtx (mode);
8896 store_expr (TREE_OPERAND (exp, 1), temp,
8897 modifier == EXPAND_STACK_PARM ? 2 : 0);
8898 jumpif (TREE_OPERAND (exp, 0), op0);
8900 start_cleanup_deferral ();
8901 store_expr (TREE_OPERAND (exp, 2), temp,
8902 modifier == EXPAND_STACK_PARM ? 2 : 0);
8905 else if (temp
8906 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8907 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8908 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8909 TREE_OPERAND (exp, 2), 0)
8910 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8911 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8912 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8914 if (GET_CODE (temp) == REG
8915 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8916 temp = gen_reg_rtx (mode);
8917 store_expr (TREE_OPERAND (exp, 2), temp,
8918 modifier == EXPAND_STACK_PARM ? 2 : 0);
8919 jumpifnot (TREE_OPERAND (exp, 0), op0);
8921 start_cleanup_deferral ();
8922 store_expr (TREE_OPERAND (exp, 1), temp,
8923 modifier == EXPAND_STACK_PARM ? 2 : 0);
8928 op1 = gen_label_rtx ();
8929 jumpifnot (TREE_OPERAND (exp, 0), op0);
8931 start_cleanup_deferral ();
8933 /* One branch of the cond can be void, if it never returns. For
8934 example A ? throw : E */
8935 if (temp != 0
8936 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8937 store_expr (TREE_OPERAND (exp, 1), temp,
8938 modifier == EXPAND_STACK_PARM ? 2 : 0);
8940 expand_expr (TREE_OPERAND (exp, 1),
8941 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8942 end_cleanup_deferral ();
8944 emit_jump_insn (gen_jump (op1));
8947 start_cleanup_deferral ();
8948 if (temp != 0
8949 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8950 store_expr (TREE_OPERAND (exp, 2), temp,
8951 modifier == EXPAND_STACK_PARM ? 2 : 0);
8953 expand_expr (TREE_OPERAND (exp, 2),
8954 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8957 end_cleanup_deferral ();
8968 /* Something needs to be initialized, but we didn't know
8969 where that thing was when building the tree. For example,
8970 it could be the return value of a function, or a parameter
8971 to a function which lives on the stack, or a temporary
8972 variable which must be passed by reference.
8974 We guarantee that the expression will either be constructed
8975 or copied into our original target. */
8977 tree slot = TREE_OPERAND (exp, 0);
8978 tree cleanups = NULL_TREE;
8981 if (TREE_CODE (slot) != VAR_DECL)
8985 target = original_target;
8987 /* Set this here so that if we get a target that refers to a
8988 register variable that's already been used, put_reg_into_stack
8989 knows that it should fix up those uses. */
8990 TREE_USED (slot) = 1;
8994 if (DECL_RTL_SET_P (slot))
8996 target = DECL_RTL (slot);
8997 /* If we have already expanded the slot, don't do
8998 anything else now. */
8999 if (TREE_OPERAND (exp, 1) == NULL_TREE)
9000 return target;
9004 target = assign_temp (type, 2, 0, 1);
9005 /* All temp slots at this level must not conflict. */
9006 preserve_temp_slots (target);
9007 SET_DECL_RTL (slot, target);
9008 if (TREE_ADDRESSABLE (slot))
9009 put_var_into_stack (slot, /*rescan=*/false);
9011 /* Since SLOT is not known to the called function
9012 to belong to its stack frame, we must build an explicit
9013 cleanup. This case occurs when we must build up a reference
9014 to pass the reference as an argument. In this case,
9015 it is very likely that such a reference need not be
9016 built here. */
9018 if (TREE_OPERAND (exp, 2) == 0)
9019 TREE_OPERAND (exp, 2)
9020 = (*lang_hooks.maybe_build_cleanup) (slot);
9021 cleanups = TREE_OPERAND (exp, 2);
9026 /* This case does occur when expanding a parameter which
9027 needs to be constructed on the stack. The target
9028 is the actual stack address that we want to initialize.
9029 The function we call will perform the cleanup in this case. */
9031 /* If we have already assigned it space, use that space,
9032 not target that we were passed in, as our target
9033 parameter is only a hint. */
9034 if (DECL_RTL_SET_P (slot))
9036 target = DECL_RTL (slot);
9037 /* If we have already expanded the slot, don't do
9038 anything else now. */
9039 if (TREE_OPERAND (exp, 1) == NULL_TREE)
9040 return target;
9044 SET_DECL_RTL (slot, target);
9045 /* If we must have an addressable slot, then make sure that
9046 the RTL that we just stored in slot is OK. */
9047 if (TREE_ADDRESSABLE (slot))
9048 put_var_into_stack (slot, /*rescan=*/true);
9052 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
9053 /* Mark it as expanded. */
9054 TREE_OPERAND (exp, 1) = NULL_TREE;
9056 store_expr (exp1, target, modifier == EXPAND_STACK_PARM ? 2 : 0);
9058 expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));
9065 tree lhs = TREE_OPERAND (exp, 0);
9066 tree rhs = TREE_OPERAND (exp, 1);
9068 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
9074 /* If lhs is complex, expand calls in rhs before computing it.
9075 That's so we don't compute a pointer and save it over a
9076 call. If lhs is simple, compute it first so we can give it
9077 as a target if the rhs is just a call. This avoids an
9078 extra temp and copy and that prevents a partial-subsumption
9079 which makes bad code. Actually we could treat
9080 component_ref's of vars like vars. */
9082 tree lhs = TREE_OPERAND (exp, 0);
9083 tree rhs = TREE_OPERAND (exp, 1);
9087 /* Check for |= or &= of a bitfield of size one into another bitfield
9088 of size 1. In this case, (unless we need the result of the
9089 assignment) we can do this more efficiently with a
9090 test followed by an assignment, if necessary.
9092 ??? At this point, we can't get a BIT_FIELD_REF here. But if
9093 things change so we do, this code should be enhanced to
9094 support it. */
9095 if (ignore
9096 && TREE_CODE (lhs) == COMPONENT_REF
9097 && (TREE_CODE (rhs) == BIT_IOR_EXPR
9098 || TREE_CODE (rhs) == BIT_AND_EXPR)
9099 && TREE_OPERAND (rhs, 0) == lhs
9100 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
9101 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
9102 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
9104 rtx label = gen_label_rtx ();
9106 do_jump (TREE_OPERAND (rhs, 1),
9107 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
9108 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
9109 expand_assignment (lhs, convert (TREE_TYPE (rhs),
9110 (TREE_CODE (rhs) == BIT_IOR_EXPR
9112 : integer_zero_node)),
9114 do_pending_stack_adjust ();
9119 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
9125 if (!TREE_OPERAND (exp, 0))
9126 expand_null_return ();
9128 expand_return (TREE_OPERAND (exp, 0));
9131 case PREINCREMENT_EXPR:
9132 case PREDECREMENT_EXPR:
9133 return expand_increment (exp, 0, ignore);
9135 case POSTINCREMENT_EXPR:
9136 case POSTDECREMENT_EXPR:
9137 /* Faster to treat as pre-increment if result is not used. */
9138 return expand_increment (exp, ! ignore, ignore);
9141 if (modifier == EXPAND_STACK_PARM)
9143 /* Are we taking the address of a nested function? */
9144 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
9145 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
9146 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
9147 && ! TREE_STATIC (exp))
9149 op0 = trampoline_address (TREE_OPERAND (exp, 0));
9150 op0 = force_operand (op0, target);
9152 /* If we are taking the address of something erroneous, just
9153 use zero. */
9154 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
9155 return const0_rtx;
9156 /* If we are taking the address of a constant and are at the
9157 top level, we have to use output_constant_def since we can't
9158 call force_const_mem at top level. */
9160 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
9161 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
9163 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
9166 /* We make sure to pass const0_rtx down if we came in with
9167 ignore set, to avoid doing the cleanups twice for something. */
9168 op0 = expand_expr (TREE_OPERAND (exp, 0),
9169 ignore ? const0_rtx : NULL_RTX, VOIDmode,
9170 (modifier == EXPAND_INITIALIZER
9171 ? modifier : EXPAND_CONST_ADDRESS));
9173 /* If we are going to ignore the result, OP0 will have been set
9174 to const0_rtx, so just return it. Don't get confused and
9175 think we are taking the address of the constant. */
9179 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
9180 clever and returns a REG when given a MEM. */
9181 op0 = protect_from_queue (op0, 1);
9183 /* We would like the object in memory. If it is a constant, we can
9184 have it be statically allocated into memory. For a non-constant,
9185 we need to allocate some memory and store the value into it. */
9187 if (CONSTANT_P (op0))
9188 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
9190 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
9191 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
9192 || GET_CODE (op0) == PARALLEL)
9194 /* If the operand is a SAVE_EXPR, we can deal with this by
9195 forcing the SAVE_EXPR into memory. */
9196 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
9198 put_var_into_stack (TREE_OPERAND (exp, 0),
9200 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
9204 /* If this object is in a register, it can't be BLKmode. */
9205 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9206 rtx memloc = assign_temp (inner_type, 1, 1, 1);
9208 if (GET_CODE (op0) == PARALLEL)
9209 /* Handle calls that pass values in multiple
9210 non-contiguous locations. The Irix 6 ABI has examples
9212 emit_group_store (memloc, op0,
9213 int_size_in_bytes (inner_type));
9215 emit_move_insn (memloc, op0);
9221 if (GET_CODE (op0) != MEM)
9224 mark_temp_addr_taken (op0);
9225 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
9227 op0 = XEXP (op0, 0);
9228 #ifdef POINTERS_EXTEND_UNSIGNED
9229 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
9230 && mode == ptr_mode)
9231 op0 = convert_memory_address (ptr_mode, op0);
9232 #endif
9233 return op0;
9236 /* If OP0 is not aligned as least as much as the type requires, we
9237 need to make a temporary, copy OP0 to it, and take the address of
9238 the temporary. We want to use the alignment of the type, not of
9239 the operand. Note that this is incorrect for FUNCTION_TYPE, but
9240 the test for BLKmode means that can't happen. The test for
9241 BLKmode is because we never make mis-aligned MEMs with non-BLKmode.
9244 We don't need to do this at all if the machine doesn't have
9245 strict alignment. */
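/* Illustration (added note): taking the address of a BLKmode field
   whose MEM is aligned only to 4 bytes, when its type's TYPE_ALIGN
   demands 8, would hand out a pointer a strict-alignment machine
   cannot safely use; so we bitwise-copy the object into a properly
   aligned stack temporary first, unless the type is addressable, in
   which case a bitwise copy is invalid and we must error.  */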
9246 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
9247 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
9249 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
9251 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9254 if (TYPE_ALIGN_OK (inner_type))
9257 if (TREE_ADDRESSABLE (inner_type))
9259 /* We can't make a bitwise copy of this object, so fail. */
9260 error ("cannot take the address of an unaligned member");
9264 new = assign_stack_temp_for_type
9265 (TYPE_MODE (inner_type),
9266 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
9267 : int_size_in_bytes (inner_type),
9268 1, build_qualified_type (inner_type,
9269 (TYPE_QUALS (inner_type)
9270 | TYPE_QUAL_CONST)));
9272 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
9273 (modifier == EXPAND_STACK_PARM
9274 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
9279 op0 = force_operand (XEXP (op0, 0), target);
9283 && GET_CODE (op0) != REG
9284 && modifier != EXPAND_CONST_ADDRESS
9285 && modifier != EXPAND_INITIALIZER
9286 && modifier != EXPAND_SUM)
9287 op0 = force_reg (Pmode, op0);
9289 if (GET_CODE (op0) == REG
9290 && ! REG_USERVAR_P (op0))
9291 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
9293 #ifdef POINTERS_EXTEND_UNSIGNED
9294 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
9295 && mode == ptr_mode)
9296 op0 = convert_memory_address (ptr_mode, op0);
9297 #endif
9299 return op0;
9301 case ENTRY_VALUE_EXPR:
9304 /* COMPLEX type for Extended Pascal & Fortran */
9307 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9310 /* Get the rtx code of the operands. */
9311 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9312 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
9315 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
9319 /* Move the real (op0) and imaginary (op1) parts to their location. */
9320 emit_move_insn (gen_realpart (mode, target), op0);
9321 emit_move_insn (gen_imagpart (mode, target), op1);
9323 insns = get_insns ();
9326 /* Complex construction should appear as a single unit. */
9327 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
9328 each with a separate pseudo as destination.
9329 It's not correct for flow to treat them as a unit. */
9330 if (GET_CODE (target) != CONCAT)
9331 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
9339 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9340 return gen_realpart (mode, op0);
9343 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9344 return gen_imagpart (mode, op0);
9348 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9352 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9355 target = gen_reg_rtx (mode);
9359 /* Store the realpart and the negated imagpart to target. */
9360 emit_move_insn (gen_realpart (partmode, target),
9361 gen_realpart (partmode, op0));
9363 imag_t = gen_imagpart (partmode, target);
9364 temp = expand_unop (partmode,
9365 ! unsignedp && flag_trapv
9366 && (GET_MODE_CLASS(partmode) == MODE_INT)
9367 ? negv_optab : neg_optab,
9368 gen_imagpart (partmode, op0), imag_t, 0);
9370 emit_move_insn (imag_t, temp);
9372 insns = get_insns ();
9375 /* Conjugate should appear as a single unit.
9376 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
9377 each with a separate pseudo as destination.
9378 It's not correct for flow to treat them as a unit. */
9379 if (GET_CODE (target) != CONCAT)
9380 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
9387 case TRY_CATCH_EXPR:
9389 tree handler = TREE_OPERAND (exp, 1);
9391 expand_eh_region_start ();
9393 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9395 expand_eh_region_end_cleanup (handler);
9400 case TRY_FINALLY_EXPR:
9402 tree try_block = TREE_OPERAND (exp, 0);
9403 tree finally_block = TREE_OPERAND (exp, 1);
9405 if (!optimize || unsafe_for_reeval (finally_block) > 1)
9407 /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
9408 is not sufficient, so we cannot expand the block twice.
9409 So we play games with GOTO_SUBROUTINE_EXPR to let us
9410 expand the thing only once. */
9411 /* When not optimizing, we go ahead with this form since
9412 (1) user breakpoints operate more predictably without
9413 code duplication, and
9414 (2) we're not running any of the global optimizers
9415 that would explode in time/space with the highly
9416 connected CFG created by the indirect branching. */
9418 rtx finally_label = gen_label_rtx ();
9419 rtx done_label = gen_label_rtx ();
9420 rtx return_link = gen_reg_rtx (Pmode);
9421 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
9422 (tree) finally_label, (tree) return_link);
9423 TREE_SIDE_EFFECTS (cleanup) = 1;
9425 /* Start a new binding layer that will keep track of all cleanup
9426 actions to be performed. */
9427 expand_start_bindings (2);
9428 target_temp_slot_level = temp_slot_level;
9430 expand_decl_cleanup (NULL_TREE, cleanup);
9431 op0 = expand_expr (try_block, target, tmode, modifier);
9433 preserve_temp_slots (op0);
9434 expand_end_bindings (NULL_TREE, 0, 0);
9435 emit_jump (done_label);
9436 emit_label (finally_label);
9437 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
9438 emit_indirect_jump (return_link);
9439 emit_label (done_label);
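/* The non-optimizing form above emits, in effect:

	 <try block, with the GOTO_SUBROUTINE cleanup armed>
	 goto done;
     finally:
	 <finally block>
	 jump *return_link;
     done:

   so the finally code is laid down exactly once and entered like a
   local subroutine from every exit of the try block.  */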
9443 expand_start_bindings (2);
9444 target_temp_slot_level = temp_slot_level;
9446 expand_decl_cleanup (NULL_TREE, finally_block);
9447 op0 = expand_expr (try_block, target, tmode, modifier);
9449 preserve_temp_slots (op0);
9450 expand_end_bindings (NULL_TREE, 0, 0);
9456 case GOTO_SUBROUTINE_EXPR:
9458 rtx subr = (rtx) TREE_OPERAND (exp, 0);
9459 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
9460 rtx return_address = gen_label_rtx ();
9461 emit_move_insn (return_link,
9462 gen_rtx_LABEL_REF (Pmode, return_address));
9464 emit_label (return_address);
9469 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
9472 return get_exception_pointer (cfun);
9475 /* Function descriptors are not valid except for as
9476 initialization constants, and should not be expanded. */
9480 return (*lang_hooks.expand_expr) (exp, original_target, tmode, modifier);
9483 /* Here to do an ordinary binary operator, generating an instruction
9484 from the optab already placed in `this_optab'. */
9486 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
9488 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
9489 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9491 if (modifier == EXPAND_STACK_PARM)
9493 temp = expand_binop (mode, this_optab, op0, op1, target,
9494 unsignedp, OPTAB_LIB_WIDEN);
9500 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9501 when applied to the address of EXP produces an address known to be
9502 aligned more than BIGGEST_ALIGNMENT. */
9505 is_aligning_offset (offset, exp)
9509 /* Strip off any conversions and WITH_RECORD_EXPR nodes. */
9510 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9511 || TREE_CODE (offset) == NOP_EXPR
9512 || TREE_CODE (offset) == CONVERT_EXPR
9513 || TREE_CODE (offset) == WITH_RECORD_EXPR)
9514 offset = TREE_OPERAND (offset, 0);
9516 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9517 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9518 if (TREE_CODE (offset) != BIT_AND_EXPR
9519 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9520 || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT) <= 0
9521 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9524 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9525 It must be NEGATE_EXPR. Then strip any more conversions. */
9526 offset = TREE_OPERAND (offset, 0);
9527 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9528 || TREE_CODE (offset) == NOP_EXPR
9529 || TREE_CODE (offset) == CONVERT_EXPR)
9530 offset = TREE_OPERAND (offset, 0);
9532 if (TREE_CODE (offset) != NEGATE_EXPR)
9535 offset = TREE_OPERAND (offset, 0);
9536 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9537 || TREE_CODE (offset) == NOP_EXPR
9538 || TREE_CODE (offset) == CONVERT_EXPR)
9539 offset = TREE_OPERAND (offset, 0);
9541 /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
9542 whose type is the same as EXP. */
9543 return (TREE_CODE (offset) == ADDR_EXPR
9544 && (TREE_OPERAND (offset, 0) == exp
9545 || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
9546 && (TREE_TYPE (TREE_OPERAND (offset, 0))
9547 == TREE_TYPE (exp)))));
9550 /* Return the tree node if ARG corresponds to a string constant or zero
9551 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9552 in bytes within the string that ARG is accessing. The type of the
9553 offset will be `sizetype'. */
9556 string_constant (arg, ptr_offset)
9562 if (TREE_CODE (arg) == ADDR_EXPR
9563 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9565 *ptr_offset = size_zero_node;
9566 return TREE_OPERAND (arg, 0);
9568 else if (TREE_CODE (arg) == PLUS_EXPR)
9570 tree arg0 = TREE_OPERAND (arg, 0);
9571 tree arg1 = TREE_OPERAND (arg, 1);
9576 if (TREE_CODE (arg0) == ADDR_EXPR
9577 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9579 *ptr_offset = convert (sizetype, arg1);
9580 return TREE_OPERAND (arg0, 0);
9582 else if (TREE_CODE (arg1) == ADDR_EXPR
9583 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9585 *ptr_offset = convert (sizetype, arg0);
9586 return TREE_OPERAND (arg1, 0);
9593 /* Expand code for a post- or pre- increment or decrement
9594 and return the RTX for the result.
9595 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9598 expand_increment (exp, post, ignore)
9604 tree incremented = TREE_OPERAND (exp, 0);
9605 optab this_optab = add_optab;
9607 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9608 int op0_is_copy = 0;
9609 int single_insn = 0;
9610 /* 1 means we can't store into OP0 directly,
9611 because it is a subreg narrower than a word,
9612 and we don't dare clobber the rest of the word. */
9615 /* Stabilize any component ref that might need to be
9616 evaluated more than once below. */
9618 || TREE_CODE (incremented) == BIT_FIELD_REF
9619 || (TREE_CODE (incremented) == COMPONENT_REF
9620 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9621 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9622 incremented = stabilize_reference (incremented);
9623 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9624 ones into save exprs so that they don't accidentally get evaluated
9625 more than once by the code below. */
9626 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9627 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9628 incremented = save_expr (incremented);
9630 /* Compute the operands as RTX.
9631 Note whether OP0 is the actual lvalue or a copy of it:
9632 I believe it is a copy iff it is a register or subreg
9633 and insns were generated in computing it. */
9635 temp = get_last_insn ();
9636 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9638 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9639 in place but instead must do sign- or zero-extension during assignment,
9640 so we copy it into a new register and let the code below use it as
9643 Note that we can safely modify this SUBREG since it is known not to be
9644 shared (it was made by the expand_expr call above). */
9646 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9649 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9653 else if (GET_CODE (op0) == SUBREG
9654 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9656 /* We cannot increment this SUBREG in place. If we are
9657 post-incrementing, get a copy of the old value. Otherwise,
9658 just mark that we cannot increment in place. */
9660 op0 = copy_to_reg (op0);
9665 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9666 && temp != get_last_insn ());
9667 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9669 /* Decide whether incrementing or decrementing. */
9670 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9671 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9672 this_optab = sub_optab;
9674 /* Convert decrement by a constant into a negative increment. */
9675 if (this_optab == sub_optab
9676 && GET_CODE (op1) == CONST_INT)
9678 op1 = GEN_INT (-INTVAL (op1));
9679 this_optab = add_optab;
9682 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9683 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9685 /* For a preincrement, see if we can do this with a single instruction. */
9688 icode = (int) this_optab->handlers[(int) mode].insn_code;
9689 if (icode != (int) CODE_FOR_nothing
9690 /* Make sure that OP0 is valid for operands 0 and 1
9691 of the insn we want to queue. */
9692 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9693 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9694 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9698 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9699 then we cannot just increment OP0. We must therefore contrive to
9700 increment the original value. Then, for postincrement, we can return
9701 OP0 since it is a copy of the old value. For preincrement, expand here
9702 unless we can do it with a single insn.
9704 Likewise if storing directly into OP0 would clobber high bits
9705 we need to preserve (bad_subreg). */
9706 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9708 /* This is the easiest way to increment the value wherever it is.
9709 Problems with multiple evaluation of INCREMENTED are prevented
9710 because either (1) it is a component_ref or preincrement,
9711 in which case it was stabilized above, or (2) it is an array_ref
9712 with constant index in an array in a register, which is
9713 safe to reevaluate. */
9714 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9715 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9716 ? MINUS_EXPR : PLUS_EXPR),
9719 TREE_OPERAND (exp, 1));
9721 while (TREE_CODE (incremented) == NOP_EXPR
9722 || TREE_CODE (incremented) == CONVERT_EXPR)
9724 newexp = convert (TREE_TYPE (incremented), newexp);
9725 incremented = TREE_OPERAND (incremented, 0);
9728 temp = expand_assignment (incremented, newexp, ! post && ! ignore , 0);
9729 return post ? op0 : temp;
9734 /* We have a true reference to the value in OP0.
9735 If there is an insn to add or subtract in this mode, queue it.
9736 Queueing the increment insn avoids the register shuffling
9737 that often results if we must increment now and first save
9738 the old value for subsequent use. */
9740 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9741 op0 = stabilize (op0);
9742 #endif
9744 icode = (int) this_optab->handlers[(int) mode].insn_code;
9745 if (icode != (int) CODE_FOR_nothing
9746 /* Make sure that OP0 is valid for operands 0 and 1
9747 of the insn we want to queue. */
9748 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9749 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9751 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9752 op1 = force_reg (mode, op1);
9754 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9756 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9758 rtx addr = (general_operand (XEXP (op0, 0), mode)
9759 ? force_reg (Pmode, XEXP (op0, 0))
9760 : copy_to_reg (XEXP (op0, 0)));
9763 op0 = replace_equiv_address (op0, addr);
9764 temp = force_reg (GET_MODE (op0), op0);
9765 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9766 op1 = force_reg (mode, op1);
9768 /* The increment queue is LIFO, thus we have to `queue'
9769 the instructions in reverse order. */
9770 enqueue_insn (op0, gen_move_insn (op0, temp));
9771 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9776 /* Preincrement, or we can't increment with one simple insn. */
9778 /* Save a copy of the value before inc or dec, to return it later. */
9779 temp = value = copy_to_reg (op0);
9781 /* Arrange to return the incremented value. */
9782 /* Copy the rtx because expand_binop will protect from the queue,
9783 and the results of that would be invalid for us to return
9784 if our caller does emit_queue before using our result. */
9785 temp = copy_rtx (value = op0);
9787 /* Increment however we can. */
9788 op1 = expand_binop (mode, this_optab, value, op1, op0,
9789 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9791 /* Make sure the value is stored into OP0. */
9793 emit_move_insn (op0, op1);
9798 /* Generate code to calculate EXP using a store-flag instruction
9799 and return an rtx for the result. EXP is either a comparison
9800 or a TRUTH_NOT_EXPR whose operand is a comparison.
9802 If TARGET is nonzero, store the result there if convenient.
9804 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
9807 Return zero if there is no suitable set-flag instruction
9808 available on this machine.
9810 Once expand_expr has been called on the arguments of the comparison,
9811 we are committed to doing the store flag, since it is not safe to
9812 re-evaluate the expression. We emit the store-flag insn by calling
9813 emit_store_flag, but only expand the arguments if we have a reason
9814 to believe that emit_store_flag will be successful. If we think that
9815 it will, but it isn't, we have to simulate the store-flag with a
9816 set/jump/set sequence. */
9819 do_store_flag (exp, target, mode, only_cheap)
9822 enum machine_mode mode;
9826 tree arg0, arg1, type;
9828 enum machine_mode operand_mode;
9832 enum insn_code icode;
9833 rtx subtarget = target;
9836 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9837 result at the end. We can't simply invert the test since it would
9838 have already been inverted if it were valid. This case occurs for
9839 some floating-point comparisons. */
9841 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9842 invert = 1, exp = TREE_OPERAND (exp, 0);
9844 arg0 = TREE_OPERAND (exp, 0);
9845 arg1 = TREE_OPERAND (exp, 1);
9847 /* Don't crash if the comparison was erroneous. */
9848 if (arg0 == error_mark_node || arg1 == error_mark_node)
9851 type = TREE_TYPE (arg0);
9852 operand_mode = TYPE_MODE (type);
9853 unsignedp = TREE_UNSIGNED (type);
9855 /* We won't bother with BLKmode store-flag operations because it would mean
9856 passing a lot of information to emit_store_flag. */
9857 if (operand_mode == BLKmode)
9860 /* We won't bother with store-flag operations involving function pointers
9861 when function pointers must be canonicalized before comparisons. */
9862 #ifdef HAVE_canonicalize_funcptr_for_compare
9863 if (HAVE_canonicalize_funcptr_for_compare
9864 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9865 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9867 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9868 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9869 == FUNCTION_TYPE))))
9870 return 0;
9871 #endif
9876 /* Get the rtx comparison code to use. We know that EXP is a comparison
9877 operation of some type. Some comparisons against 1 and -1 can be
9878 converted to comparisons with zero. Do so here so that the tests
9879 below will be aware that we have a comparison with zero. These
9880 tests will not catch constants in the first operand, but constants
9881 are rarely passed as the first operand. */
9883 switch (TREE_CODE (exp))
9892 if (integer_onep (arg1))
9893 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9895 code = unsignedp ? LTU : LT;
9898 if (! unsignedp && integer_all_onesp (arg1))
9899 arg1 = integer_zero_node, code = LT;
9901 code = unsignedp ? LEU : LE;
9904 if (! unsignedp && integer_all_onesp (arg1))
9905 arg1 = integer_zero_node, code = GE;
9907 code = unsignedp ? GTU : GT;
9910 if (integer_onep (arg1))
9911 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9913 code = unsignedp ? GEU : GE;
9916 case UNORDERED_EXPR:
9942 /* Put a constant second. */
9943 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9945 tem = arg0; arg0 = arg1; arg1 = tem;
9946 code = swap_condition (code);
9949 /* If this is an equality or inequality test of a single bit, we can
9950 do this by shifting the bit being tested to the low-order bit and
9951 masking the result with the constant 1. If the condition was EQ,
9952 we xor it with 1. This does not require an scc insn and is faster
9953 than an scc insn even if we have it. */
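/* Illustration (added note): (x & 8) != 0 compiles to (x >> 3) & 1,
   and (x & 8) == 0 to ((x >> 3) & 1) ^ 1; when the tested bit is the
   sign bit, the shift alone (done unsigned) leaves a 0/1 value and
   the final AND is dropped.  */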
9955 if ((code == NE || code == EQ)
9956 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9957 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9959 tree inner = TREE_OPERAND (arg0, 0);
9960 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
9963 /* If INNER is a right shift of a constant and it plus BITNUM does
9964 not overflow, adjust BITNUM and INNER. */
9966 if (TREE_CODE (inner) == RSHIFT_EXPR
9967 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
9968 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
9969 && bitnum < TYPE_PRECISION (type)
9970 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
9971 bitnum - TYPE_PRECISION (type)))
9973 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
9974 inner = TREE_OPERAND (inner, 0);
9977 /* If we are going to be able to omit the AND below, we must do our
9978 operations as unsigned. If we must use the AND, we have a choice.
9979 Normally unsigned is faster, but for some machines signed is. */
9980 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
9981 #ifdef LOAD_EXTEND_OP
9982 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
9983 #else
9984 : 1
9985 #endif
9986 );
9988 if (! get_subtarget (subtarget)
9989 || GET_MODE (subtarget) != operand_mode
9990 || ! safe_from_p (subtarget, inner, 1))
9993 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
9996 op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
9997 size_int (bitnum), subtarget, ops_unsignedp);
9999 if (GET_MODE (op0) != mode)
10000 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10002 if ((code == EQ && ! invert) || (code == NE && invert))
10003 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10004 ops_unsignedp, OPTAB_LIB_WIDEN);
10006 /* Put the AND last so it can combine with more things. */
10007 if (bitnum != TYPE_PRECISION (type) - 1)
10008 op0 = expand_and (mode, op0, const1_rtx, subtarget);
10013 /* Now see if we are likely to be able to do this. Return if not. */
10014 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10017 icode = setcc_gen_code[(int) code];
10018 if (icode == CODE_FOR_nothing
10019 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10021 /* We can only do this if it is one of the special cases that
10022 can be handled without an scc insn. */
10023 if ((code == LT && integer_zerop (arg1))
10024 || (! only_cheap && code == GE && integer_zerop (arg1)))
10026 else if (BRANCH_COST >= 0
10027 && ! only_cheap && (code == NE || code == EQ)
10028 && TREE_CODE (type) != REAL_TYPE
10029 && ((abs_optab->handlers[(int) operand_mode].insn_code
10030 != CODE_FOR_nothing)
10031 || (ffs_optab->handlers[(int) operand_mode].insn_code
10032 != CODE_FOR_nothing)))
10038 if (! get_subtarget (target)
10039 || GET_MODE (subtarget) != operand_mode
10040 || ! safe_from_p (subtarget, arg1, 1))
10043 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10044 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10047 target = gen_reg_rtx (mode);
10049 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10050 because, if the emit_store_flag does anything it will succeed and
10051 OP0 and OP1 will not be used subsequently. */
10053 result = emit_store_flag (target, code,
10054 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10055 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10056 operand_mode, unsignedp, 1);
10061 result = expand_binop (mode, xor_optab, result, const1_rtx,
10062 result, 0, OPTAB_LIB_WIDEN);
10066 /* If this failed, we have to do this with set/compare/jump/set code. */
10067 if (GET_CODE (target) != REG
10068 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10069 target = gen_reg_rtx (GET_MODE (target));
10071 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10072 result = compare_from_rtx (op0, op1, code, unsignedp,
10073 operand_mode, NULL_RTX);
10074 if (GET_CODE (result) == CONST_INT)
10075 return (((result == const0_rtx && ! invert)
10076 || (result != const0_rtx && invert))
10077 ? const0_rtx : const1_rtx);
10079 /* The code of RESULT may not match CODE if compare_from_rtx
10080 decided to swap its operands and reverse the original code.
10082 We know that compare_from_rtx returns either a CONST_INT or
10083 a new comparison code, so it is safe to just extract the
10084 code from RESULT. */
10085 code = GET_CODE (result);
10087 label = gen_label_rtx ();
10088 if (bcc_gen_fctn[(int) code] == 0)
10089 abort ();
10091 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10092 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10093 emit_label (label);
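/* The fallback just emitted is (ignoring inversion):

       target = 1;
       if (<comparison>) goto L;
       target = 0;
     L:

   simulating a store-flag instruction with one conditional jump.  */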
10099 /* Stubs in case we haven't got a casesi insn. */
10100 #ifndef HAVE_casesi
10101 # define HAVE_casesi 0
10102 # define gen_casesi(a, b, c, d, e) (0)
10103 # define CODE_FOR_casesi CODE_FOR_nothing
10104 #endif
10106 /* If the machine does not have a case insn that compares the bounds,
10107 this means extra overhead for dispatch tables, which raises the
10108 threshold for using them. */
10109 #ifndef CASE_VALUES_THRESHOLD
10110 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
10111 #endif /* CASE_VALUES_THRESHOLD */
10114 case_values_threshold ()
10116 return CASE_VALUES_THRESHOLD;
/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */

int
try_casesi (index_type, index_expr, minval, range,
            table_label, default_label)
     tree index_type, index_expr, minval, range;
     rtx table_label ATTRIBUTE_UNUSED;
     rtx default_label;
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;
  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build (MINUS_EXPR, index_type,
                          index_expr, minval);
      minval = integer_zero_node;
      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
                               omode, 1, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
        {
          index_expr = convert ((*lang_hooks.types.type_for_size)
                                (index_bits, 0), index_expr);
          index_type = TREE_TYPE (index_expr);
        }

      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
    }
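  /* Why the wide-index path above is safe: when the controller is, say,
     DImode but casesi wants SImode, the subtraction and the LTU bounds
     check happen in the original wide mode; any index surviving the check
     fits in the table, so truncating it to SImode cannot change which
     entry is selected.  */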
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();
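  /* The casesi expander dictates, via insn_data, the mode and predicate of
     each of its first three operands.  The same legitimization pattern is
     repeated below for the index, the minimum and the range: convert the
     value to the expected mode, then force it into a register if the
     predicate still rejects it.  */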
  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);

  op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
                       op1, TREE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
                       op2, TREE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
                              table_label, default_label));
  return 1;
}
/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif
/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

static void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  rtx temp, vector;
  if (INTVAL (range) > cfun->max_jumptable_ents)
    cfun->max_jumptable_ents = INTVAL (range);
  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */
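  /* In C terms, with the lower bound already subtracted:

         if ((unsigned) index > (unsigned) range)
           goto default_label;

     catches index values below the old minimum (they wrapped around to
     huge unsigned values) as well as values above the old maximum, in a
     single compare.  */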
  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
                           default_label);
  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);
  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif
  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
                        gen_rtx_MULT (Pmode, index,
                                      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
                        gen_rtx_LABEL_REF (Pmode, table_label));
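  /* That is, the address now has the shape

         (plus:P (mult:P index (const_int entry_size))
                 (label_ref:P table_label))

     and is either rewritten by PIC_CASE_VECTOR_ADDRESS or legitimized
     with memory_address_noforce below.  */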
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));
  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
int
try_tablejump (index_type, index_expr, minval, range,
               table_label, default_label)
     tree index_type, index_expr, minval, range;
     rtx table_label, default_label;
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;
  index_expr = fold (build (MINUS_EXPR, index_type,
                            convert (index_type, index_expr),
                            convert (index_type, minval)));
  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();
  do_tablejump (index, TYPE_MODE (index_type),
                convert_modes (TYPE_MODE (index_type),
                               TYPE_MODE (TREE_TYPE (range)),
                               expand_expr (range, NULL_RTX,
                                            VOIDmode, 0),
                               TREE_UNSIGNED (TREE_TYPE (range))),
                table_label, default_label);
  return 1;
}
/* Nonzero if the mode is a valid vector mode for this architecture.
   This returns nonzero even if there is no hardware support for the
   vector mode, but we can emulate with narrower modes.  */

int
vector_mode_valid_p (mode)
     enum machine_mode mode;
{
  enum mode_class class = GET_MODE_CLASS (mode);
  enum machine_mode innermode;

  /* Anything other than a vector mode is certainly not valid here.  */
  if (class != MODE_VECTOR_INT
      && class != MODE_VECTOR_FLOAT)
    return 0;

  /* Hardware support.  Woo hoo!  */
  if (VECTOR_MODE_SUPPORTED_P (mode))
    return 1;
  innermode = GET_MODE_INNER (mode);

  /* Arguably we should also return 1 when, say, V4DI is requested and we
     have no DImode moves but do have V2DI, but that case is very unlikely,
     so we don't handle it.  */

  /* If we have support for the inner mode, we can safely emulate it.
     We may not have V2DI, but we can emulate it with a pair of DIs.  */
  return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
}
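/* For example: a target with no V2DImode move pattern but with a DImode
   move pattern still gets a nonzero result for V2DImode here, since a
   V2DI object can be moved as two DImode pieces.  */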
/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */

static rtx
const_vector_from_tree (exp)
     tree exp;
{
  rtvec v;
  int units, i;
  tree link, elt;
  enum machine_mode inner, mode;

  mode = TYPE_MODE (TREE_TYPE (exp));

  if (is_zeros_p (exp))
    return CONST0_RTX (mode);

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);
  v = rtvec_alloc (units);
  link = TREE_VECTOR_CST_ELTS (exp);
  for (i = 0; link; link = TREE_CHAIN (link), ++i)
    {
      elt = TREE_VALUE (link);

      if (TREE_CODE (elt) == REAL_CST)
        RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
                                                         inner);
      else
        RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
                                               TREE_INT_CST_HIGH (elt),
                                               inner);
    }

  return gen_rtx_raw_CONST_VECTOR (mode, v);
}
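/* For example (illustrative): a VECTOR_CST holding {1, 2, 3, 4} for a
   type with mode V4SImode yields

       (const_vector:V4SI [(const_int 1) (const_int 2)
                           (const_int 3) (const_int 4)])

   while an all-zeros constant short-circuits to CONST0_RTX above.  */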
#include "gt-expr.h"