1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
24 #include "coretypes.h"
32 #include "hard-reg-set.h"
35 #include "insn-config.h"
36 #include "insn-attr.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
44 #include "typeclass.h"
47 #include "langhooks.h"
51 /* Decide whether a function's arguments should be processed
52 from first to last or from last to first.
54 They should if the stack and args grow in opposite directions, but
55 only if we have push insns. */
59 #ifndef PUSH_ARGS_REVERSED
60 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
61 #define PUSH_ARGS_REVERSED /* If it's last to first. */
67 #ifndef STACK_PUSH_CODE
68 #ifdef STACK_GROWS_DOWNWARD
69 #define STACK_PUSH_CODE PRE_DEC
71 #define STACK_PUSH_CODE PRE_INC
75 /* Assume that case vectors are not pc-relative. */
76 #ifndef CASE_VECTOR_PC_RELATIVE
77 #define CASE_VECTOR_PC_RELATIVE 0
80 /* Convert defined/undefined to boolean. */
81 #ifdef TARGET_MEM_FUNCTIONS
82 #undef TARGET_MEM_FUNCTIONS
83 #define TARGET_MEM_FUNCTIONS 1
85 #define TARGET_MEM_FUNCTIONS 0
89 /* If this is nonzero, we do not bother generating VOLATILE
90 around volatile memory references, and we are willing to
91 output indirect addresses. If cse is to follow, we reject
92 indirect addresses so a useful potential cse is generated;
93 if it is used only once, instruction combination will produce
94 the same indirect address eventually. */
97 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
98 tree placeholder_list = 0;
100 /* This structure is used by move_by_pieces to describe the move to
102 struct move_by_pieces
111 int explicit_inc_from;
112 unsigned HOST_WIDE_INT len;
113 HOST_WIDE_INT offset;
117 /* This structure is used by store_by_pieces to describe the clear to
120 struct store_by_pieces
126 unsigned HOST_WIDE_INT len;
127 HOST_WIDE_INT offset;
128 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
133 static rtx enqueue_insn PARAMS ((rtx, rtx));
134 static unsigned HOST_WIDE_INT move_by_pieces_ninsns
135 PARAMS ((unsigned HOST_WIDE_INT,
137 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
138 struct move_by_pieces *));
139 static bool block_move_libcall_safe_for_call_parm PARAMS ((void));
140 static bool emit_block_move_via_movstr PARAMS ((rtx, rtx, rtx, unsigned));
141 static rtx emit_block_move_via_libcall PARAMS ((rtx, rtx, rtx));
142 static tree emit_block_move_libcall_fn PARAMS ((int));
143 static void emit_block_move_via_loop PARAMS ((rtx, rtx, rtx, unsigned));
144 static rtx clear_by_pieces_1 PARAMS ((PTR, HOST_WIDE_INT,
146 static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
148 static void store_by_pieces_1 PARAMS ((struct store_by_pieces *,
150 static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...),
152 struct store_by_pieces *));
153 static bool clear_storage_via_clrstr PARAMS ((rtx, rtx, unsigned));
154 static rtx clear_storage_via_libcall PARAMS ((rtx, rtx));
155 static tree clear_storage_libcall_fn PARAMS ((int));
156 static rtx compress_float_constant PARAMS ((rtx, rtx));
157 static rtx get_subtarget PARAMS ((rtx));
158 static int is_zeros_p PARAMS ((tree));
159 static int mostly_zeros_p PARAMS ((tree));
160 static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
161 HOST_WIDE_INT, enum machine_mode,
162 tree, tree, int, int));
163 static void store_constructor PARAMS ((tree, rtx, int, HOST_WIDE_INT));
164 static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
165 HOST_WIDE_INT, enum machine_mode,
166 tree, enum machine_mode, int, tree,
168 static rtx var_rtx PARAMS ((tree));
170 static unsigned HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
171 static unsigned HOST_WIDE_INT highest_pow2_factor_for_type PARAMS ((tree,
174 static int is_aligning_offset PARAMS ((tree, tree));
175 static rtx expand_increment PARAMS ((tree, int, int));
176 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
178 static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
180 static void do_tablejump PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));
181 static rtx const_vector_from_tree PARAMS ((tree));
183 /* Record for each mode whether we can move a register directly to or
184 from an object of that mode in memory. If we can't, we won't try
185 to use that mode directly when accessing a field of that mode. */
187 static char direct_load[NUM_MACHINE_MODES];
188 static char direct_store[NUM_MACHINE_MODES];
190 /* Record for each mode whether we can float-extend from memory. */
192 static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
194 /* If a memory-to-memory move would take MOVE_RATIO or more simple
195 move-instruction sequences, we will do a movstr or libcall instead. */
198 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
201 /* If we are optimizing for space (-Os), cut down the default move ratio. */
202 #define MOVE_RATIO (optimize_size ? 3 : 15)
206 /* This macro is used to determine whether move_by_pieces should be called
207 to perform a structure copy. */
208 #ifndef MOVE_BY_PIECES_P
209 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
210 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
213 /* If a clear memory operation would take CLEAR_RATIO or more simple
214 move-instruction sequences, we will do a clrstr or libcall instead. */
217 #if defined (HAVE_clrstrqi) || defined (HAVE_clrstrhi) || defined (HAVE_clrstrsi) || defined (HAVE_clrstrdi) || defined (HAVE_clrstrti)
218 #define CLEAR_RATIO 2
220 /* If we are optimizing for space, cut down the default clear ratio. */
221 #define CLEAR_RATIO (optimize_size ? 3 : 15)
225 /* This macro is used to determine whether clear_by_pieces should be
226 called to clear storage. */
227 #ifndef CLEAR_BY_PIECES_P
228 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
229 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
232 /* This macro is used to determine whether store_by_pieces should be
233 called to "memset" storage with byte values other than zero, or
234 to "memcpy" storage when the source is a constant string. */
235 #ifndef STORE_BY_PIECES_P
236 #define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
239 /* This array records the insn_code of insns to perform block moves. */
240 enum insn_code movstr_optab[NUM_MACHINE_MODES];
242 /* This array records the insn_code of insns to perform block clears. */
243 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
245 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
247 #ifndef SLOW_UNALIGNED_ACCESS
248 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
251 /* This is run once per compilation to set up which modes can be used
252 directly in memory and to initialize the block move optab. */
258 enum machine_mode mode;
263 /* Try indexing by frame ptr and try by stack ptr.
264 It is known that on the Convex the stack ptr isn't a valid index.
265 With luck, one or the other is valid on any machine. */
266 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
267 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
269 /* A scratch register we can modify in-place below to avoid
270 useless RTL allocations. */
271 reg = gen_rtx_REG (VOIDmode, -1);
273 insn = rtx_alloc (INSN);
274 pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
275 PATTERN (insn) = pat;
277 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
278 mode = (enum machine_mode) ((int) mode + 1))
282 direct_load[(int) mode] = direct_store[(int) mode] = 0;
283 PUT_MODE (mem, mode);
284 PUT_MODE (mem1, mode);
285 PUT_MODE (reg, mode);
287 /* See if there is some register that can be used in this mode and
288 directly loaded or stored from memory. */
290 if (mode != VOIDmode && mode != BLKmode)
291 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
292 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
295 if (! HARD_REGNO_MODE_OK (regno, mode))
301 SET_DEST (pat) = reg;
302 if (recog (pat, insn, &num_clobbers) >= 0)
303 direct_load[(int) mode] = 1;
305 SET_SRC (pat) = mem1;
306 SET_DEST (pat) = reg;
307 if (recog (pat, insn, &num_clobbers) >= 0)
308 direct_load[(int) mode] = 1;
311 SET_DEST (pat) = mem;
312 if (recog (pat, insn, &num_clobbers) >= 0)
313 direct_store[(int) mode] = 1;
316 SET_DEST (pat) = mem1;
317 if (recog (pat, insn, &num_clobbers) >= 0)
318 direct_store[(int) mode] = 1;
322 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
324 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
325 mode = GET_MODE_WIDER_MODE (mode))
327 enum machine_mode srcmode;
328 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
329 srcmode = GET_MODE_WIDER_MODE (srcmode))
333 ic = can_extend_p (mode, srcmode, 0);
334 if (ic == CODE_FOR_nothing)
337 PUT_MODE (mem, srcmode);
339 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
340 float_extend_from_mem[mode][srcmode] = true;
345 /* This is run at the start of compiling a function. */
350 cfun->expr = (struct expr_status *) ggc_alloc (sizeof (struct expr_status));
353 pending_stack_adjust = 0;
354 stack_pointer_delta = 0;
355 inhibit_defer_pop = 0;
357 apply_args_value = 0;
361 /* Small sanity check that the queue is empty at the end of a function. */
364 finish_expr_for_function ()
370 /* Manage the queue of increment instructions to be output
371 for POSTINCREMENT_EXPR expressions, etc. */
373 /* Queue up to increment (or change) VAR later. BODY says how:
374 BODY should be the same thing you would pass to emit_insn
375 to increment right away. It will go to emit_insn later on.
377 The value is a QUEUED expression to be used in place of VAR
378 where you want to guarantee the pre-incrementation value of VAR. */
381 enqueue_insn (var, body)
384 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
385 body, pending_chain);
386 return pending_chain;
389 /* Use protect_from_queue to convert a QUEUED expression
390 into something that you can put immediately into an instruction.
391 If the queued incrementation has not happened yet,
392 protect_from_queue returns the variable itself.
393 If the incrementation has happened, protect_from_queue returns a temp
394 that contains a copy of the old value of the variable.
396 Any time an rtx which might possibly be a QUEUED is to be put
397 into an instruction, it must be passed through protect_from_queue first.
398 QUEUED expressions are not meaningful in instructions.
400 Do not pass a value through protect_from_queue and then hold
401 on to it for a while before putting it in an instruction!
402 If the queue is flushed in between, incorrect code will result. */
405 protect_from_queue (x, modify)
409 RTX_CODE code = GET_CODE (x);
411 #if 0 /* A QUEUED can hang around after the queue is forced out. */
412 /* Shortcut for most common case. */
413 if (pending_chain == 0)
419 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
420 use of autoincrement. Make a copy of the contents of the memory
421 location rather than a copy of the address, but not if the value is
422 of mode BLKmode. Don't modify X in place since it might be
424 if (code == MEM && GET_MODE (x) != BLKmode
425 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
428 rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));
432 rtx temp = gen_reg_rtx (GET_MODE (x));
434 emit_insn_before (gen_move_insn (temp, new),
439 /* Copy the address into a pseudo, so that the returned value
440 remains correct across calls to emit_queue. */
441 return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
444 /* Otherwise, recursively protect the subexpressions of all
445 the kinds of rtx's that can contain a QUEUED. */
448 rtx tem = protect_from_queue (XEXP (x, 0), 0);
449 if (tem != XEXP (x, 0))
455 else if (code == PLUS || code == MULT)
457 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
458 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
459 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
468 /* If the increment has not happened, use the variable itself. Copy it
469 into a new pseudo so that the value remains correct across calls to
471 if (QUEUED_INSN (x) == 0)
472 return copy_to_reg (QUEUED_VAR (x));
473 /* If the increment has happened and a pre-increment copy exists,
475 if (QUEUED_COPY (x) != 0)
476 return QUEUED_COPY (x);
477 /* The increment has happened but we haven't set up a pre-increment copy.
478 Set one up now, and use it. */
479 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
480 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
482 return QUEUED_COPY (x);
485 /* Return nonzero if X contains a QUEUED expression:
486 if it contains anything that will be altered by a queued increment.
487 We handle only combinations of MEM, PLUS, MINUS and MULT operators
488 since memory addresses generally contain only those. */
494 enum rtx_code code = GET_CODE (x);
500 return queued_subexp_p (XEXP (x, 0));
504 return (queued_subexp_p (XEXP (x, 0))
505 || queued_subexp_p (XEXP (x, 1)));
511 /* Perform all the pending incrementations. */
517 while ((p = pending_chain))
519 rtx body = QUEUED_BODY (p);
521 switch (GET_CODE (body))
529 QUEUED_INSN (p) = body;
533 #ifdef ENABLE_CHECKING
540 QUEUED_INSN (p) = emit_insn (body);
544 pending_chain = QUEUED_NEXT (p);
548 /* Copy data from FROM to TO, where the machine modes are not the same.
549 Both modes may be integer, or both may be floating.
550 UNSIGNEDP should be nonzero if FROM is an unsigned type.
551 This causes zero-extension instead of sign-extension. */
554 convert_move (to, from, unsignedp)
558 enum machine_mode to_mode = GET_MODE (to);
559 enum machine_mode from_mode = GET_MODE (from);
560 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
561 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
565 /* rtx code for making an equivalent value. */
566 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
567 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
569 to = protect_from_queue (to, 1);
570 from = protect_from_queue (from, 0);
572 if (to_real != from_real)
575 /* If FROM is a SUBREG that indicates that we have already done at least
576 the required extension, strip it. We don't handle such SUBREGs as
579 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
580 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
581 >= GET_MODE_SIZE (to_mode))
582 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
583 from = gen_lowpart (to_mode, from), from_mode = to_mode;
585 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
588 if (to_mode == from_mode
589 || (from_mode == VOIDmode && CONSTANT_P (from)))
591 emit_move_insn (to, from);
595 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
597 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
600 if (VECTOR_MODE_P (to_mode))
601 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
603 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
605 emit_move_insn (to, from);
609 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
611 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
612 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
616 if (to_real != from_real)
623 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
625 /* Try converting directly if the insn is supported. */
626 if ((code = can_extend_p (to_mode, from_mode, 0))
629 emit_unop_insn (code, to, from, UNKNOWN);
634 #ifdef HAVE_trunchfqf2
635 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
637 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
641 #ifdef HAVE_trunctqfqf2
642 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
644 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
648 #ifdef HAVE_truncsfqf2
649 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
651 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
655 #ifdef HAVE_truncdfqf2
656 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
658 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
662 #ifdef HAVE_truncxfqf2
663 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
665 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
669 #ifdef HAVE_trunctfqf2
670 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
672 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
677 #ifdef HAVE_trunctqfhf2
678 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
680 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
684 #ifdef HAVE_truncsfhf2
685 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
687 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
691 #ifdef HAVE_truncdfhf2
692 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
694 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
698 #ifdef HAVE_truncxfhf2
699 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
701 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
705 #ifdef HAVE_trunctfhf2
706 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
708 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
713 #ifdef HAVE_truncsftqf2
714 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
716 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
720 #ifdef HAVE_truncdftqf2
721 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
723 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
727 #ifdef HAVE_truncxftqf2
728 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
730 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
734 #ifdef HAVE_trunctftqf2
735 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
737 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
742 #ifdef HAVE_truncdfsf2
743 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
745 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
749 #ifdef HAVE_truncxfsf2
750 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
752 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
756 #ifdef HAVE_trunctfsf2
757 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
759 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
763 #ifdef HAVE_truncxfdf2
764 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
766 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
770 #ifdef HAVE_trunctfdf2
771 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
773 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
785 libcall = extendsfdf2_libfunc;
789 libcall = extendsfxf2_libfunc;
793 libcall = extendsftf2_libfunc;
805 libcall = truncdfsf2_libfunc;
809 libcall = extenddfxf2_libfunc;
813 libcall = extenddftf2_libfunc;
825 libcall = truncxfsf2_libfunc;
829 libcall = truncxfdf2_libfunc;
841 libcall = trunctfsf2_libfunc;
845 libcall = trunctfdf2_libfunc;
857 if (libcall == (rtx) 0)
858 /* This conversion is not implemented yet. */
862 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
864 insns = get_insns ();
866 emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
871 /* Now both modes are integers. */
873 /* Handle expanding beyond a word. */
874 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
875 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
882 enum machine_mode lowpart_mode;
883 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
885 /* Try converting directly if the insn is supported. */
886 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
889 /* If FROM is a SUBREG, put it into a register. Do this
890 so that we always generate the same set of insns for
891 better cse'ing; if an intermediate assignment occurred,
892 we won't be doing the operation directly on the SUBREG. */
893 if (optimize > 0 && GET_CODE (from) == SUBREG)
894 from = force_reg (from_mode, from);
895 emit_unop_insn (code, to, from, equiv_code);
898 /* Next, try converting via full word. */
899 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
900 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
901 != CODE_FOR_nothing))
903 if (GET_CODE (to) == REG)
904 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
905 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
906 emit_unop_insn (code, to,
907 gen_lowpart (word_mode, to), equiv_code);
911 /* No special multiword conversion insn; do it by hand. */
914 /* Since we will turn this into a no conflict block, we must ensure
915 that the source does not overlap the target. */
917 if (reg_overlap_mentioned_p (to, from))
918 from = force_reg (from_mode, from);
920 /* Get a copy of FROM widened to a word, if necessary. */
921 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
922 lowpart_mode = word_mode;
924 lowpart_mode = from_mode;
926 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
928 lowpart = gen_lowpart (lowpart_mode, to);
929 emit_move_insn (lowpart, lowfrom);
931 /* Compute the value to put in each remaining word. */
933 fill_value = const0_rtx;
938 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
939 && STORE_FLAG_VALUE == -1)
941 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
943 fill_value = gen_reg_rtx (word_mode);
944 emit_insn (gen_slt (fill_value));
950 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
951 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
953 fill_value = convert_to_mode (word_mode, fill_value, 1);
957 /* Fill the remaining words. */
958 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
960 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
961 rtx subword = operand_subword (to, index, 1, to_mode);
966 if (fill_value != subword)
967 emit_move_insn (subword, fill_value);
970 insns = get_insns ();
973 emit_no_conflict_block (insns, to, from, NULL_RTX,
974 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
978 /* Truncating multi-word to a word or less. */
979 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
980 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
982 if (!((GET_CODE (from) == MEM
983 && ! MEM_VOLATILE_P (from)
984 && direct_load[(int) to_mode]
985 && ! mode_dependent_address_p (XEXP (from, 0)))
986 || GET_CODE (from) == REG
987 || GET_CODE (from) == SUBREG))
988 from = force_reg (from_mode, from);
989 convert_move (to, gen_lowpart (word_mode, from), 0);
993 /* Handle pointer conversion. */ /* SPEE 900220. */
994 if (to_mode == PQImode)
996 if (from_mode != QImode)
997 from = convert_to_mode (QImode, from, unsignedp);
999 #ifdef HAVE_truncqipqi2
1000 if (HAVE_truncqipqi2)
1002 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
1005 #endif /* HAVE_truncqipqi2 */
1009 if (from_mode == PQImode)
1011 if (to_mode != QImode)
1013 from = convert_to_mode (QImode, from, unsignedp);
1018 #ifdef HAVE_extendpqiqi2
1019 if (HAVE_extendpqiqi2)
1021 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
1024 #endif /* HAVE_extendpqiqi2 */
1029 if (to_mode == PSImode)
1031 if (from_mode != SImode)
1032 from = convert_to_mode (SImode, from, unsignedp);
1034 #ifdef HAVE_truncsipsi2
1035 if (HAVE_truncsipsi2)
1037 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
1040 #endif /* HAVE_truncsipsi2 */
1044 if (from_mode == PSImode)
1046 if (to_mode != SImode)
1048 from = convert_to_mode (SImode, from, unsignedp);
1053 #ifdef HAVE_extendpsisi2
1054 if (! unsignedp && HAVE_extendpsisi2)
1056 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
1059 #endif /* HAVE_extendpsisi2 */
1060 #ifdef HAVE_zero_extendpsisi2
1061 if (unsignedp && HAVE_zero_extendpsisi2)
1063 emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
1066 #endif /* HAVE_zero_extendpsisi2 */
1071 if (to_mode == PDImode)
1073 if (from_mode != DImode)
1074 from = convert_to_mode (DImode, from, unsignedp);
1076 #ifdef HAVE_truncdipdi2
1077 if (HAVE_truncdipdi2)
1079 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1082 #endif /* HAVE_truncdipdi2 */
1086 if (from_mode == PDImode)
1088 if (to_mode != DImode)
1090 from = convert_to_mode (DImode, from, unsignedp);
1095 #ifdef HAVE_extendpdidi2
1096 if (HAVE_extendpdidi2)
1098 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1101 #endif /* HAVE_extendpdidi2 */
1106 /* Now follow all the conversions between integers
1107 no more than a word long. */
1109 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1110 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1111 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1112 GET_MODE_BITSIZE (from_mode)))
1114 if (!((GET_CODE (from) == MEM
1115 && ! MEM_VOLATILE_P (from)
1116 && direct_load[(int) to_mode]
1117 && ! mode_dependent_address_p (XEXP (from, 0)))
1118 || GET_CODE (from) == REG
1119 || GET_CODE (from) == SUBREG))
1120 from = force_reg (from_mode, from);
1121 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1122 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1123 from = copy_to_reg (from);
1124 emit_move_insn (to, gen_lowpart (to_mode, from));
1128 /* Handle extension. */
1129 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1131 /* Convert directly if that works. */
1132 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1133 != CODE_FOR_nothing)
1136 from = force_not_mem (from);
1138 emit_unop_insn (code, to, from, equiv_code);
1143 enum machine_mode intermediate;
1147 /* Search for a mode to convert via. */
1148 for (intermediate = from_mode; intermediate != VOIDmode;
1149 intermediate = GET_MODE_WIDER_MODE (intermediate))
1150 if (((can_extend_p (to_mode, intermediate, unsignedp)
1151 != CODE_FOR_nothing)
1152 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1153 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1154 GET_MODE_BITSIZE (intermediate))))
1155 && (can_extend_p (intermediate, from_mode, unsignedp)
1156 != CODE_FOR_nothing))
1158 convert_move (to, convert_to_mode (intermediate, from,
1159 unsignedp), unsignedp);
1163 /* No suitable intermediate mode.
1164 Generate what we need with shifts. */
1165 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1166 - GET_MODE_BITSIZE (from_mode), 0);
1167 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1168 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1170 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1173 emit_move_insn (to, tmp);
1178 /* Support special truncate insns for certain modes. */
1180 if (from_mode == DImode && to_mode == SImode)
1182 #ifdef HAVE_truncdisi2
1183 if (HAVE_truncdisi2)
1185 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1189 convert_move (to, force_reg (from_mode, from), unsignedp);
1193 if (from_mode == DImode && to_mode == HImode)
1195 #ifdef HAVE_truncdihi2
1196 if (HAVE_truncdihi2)
1198 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1202 convert_move (to, force_reg (from_mode, from), unsignedp);
1206 if (from_mode == DImode && to_mode == QImode)
1208 #ifdef HAVE_truncdiqi2
1209 if (HAVE_truncdiqi2)
1211 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1215 convert_move (to, force_reg (from_mode, from), unsignedp);
1219 if (from_mode == SImode && to_mode == HImode)
1221 #ifdef HAVE_truncsihi2
1222 if (HAVE_truncsihi2)
1224 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1228 convert_move (to, force_reg (from_mode, from), unsignedp);
1232 if (from_mode == SImode && to_mode == QImode)
1234 #ifdef HAVE_truncsiqi2
1235 if (HAVE_truncsiqi2)
1237 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1241 convert_move (to, force_reg (from_mode, from), unsignedp);
1245 if (from_mode == HImode && to_mode == QImode)
1247 #ifdef HAVE_trunchiqi2
1248 if (HAVE_trunchiqi2)
1250 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1254 convert_move (to, force_reg (from_mode, from), unsignedp);
1258 if (from_mode == TImode && to_mode == DImode)
1260 #ifdef HAVE_trunctidi2
1261 if (HAVE_trunctidi2)
1263 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1267 convert_move (to, force_reg (from_mode, from), unsignedp);
1271 if (from_mode == TImode && to_mode == SImode)
1273 #ifdef HAVE_trunctisi2
1274 if (HAVE_trunctisi2)
1276 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1280 convert_move (to, force_reg (from_mode, from), unsignedp);
1284 if (from_mode == TImode && to_mode == HImode)
1286 #ifdef HAVE_trunctihi2
1287 if (HAVE_trunctihi2)
1289 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1293 convert_move (to, force_reg (from_mode, from), unsignedp);
1297 if (from_mode == TImode && to_mode == QImode)
1299 #ifdef HAVE_trunctiqi2
1300 if (HAVE_trunctiqi2)
1302 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1306 convert_move (to, force_reg (from_mode, from), unsignedp);
1310 /* Handle truncation of volatile memrefs, and so on;
1311 the things that couldn't be truncated directly,
1312 and for which there was no special instruction. */
1313 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1315 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1316 emit_move_insn (to, temp);
1320 /* Mode combination is not recognized. */
1324 /* Return an rtx for a value that would result
1325 from converting X to mode MODE.
1326 Both X and MODE may be floating, or both integer.
1327 UNSIGNEDP is nonzero if X is an unsigned value.
1328 This can be done by referring to a part of X in place
1329 or by copying to a new temporary with conversion.
1331 This function *must not* call protect_from_queue
1332 except when putting X into an insn (in which case convert_move does it). */
1335 convert_to_mode (mode, x, unsignedp)
1336 enum machine_mode mode;
1340 return convert_modes (mode, VOIDmode, x, unsignedp);
1343 /* Return an rtx for a value that would result
1344 from converting X from mode OLDMODE to mode MODE.
1345 Both modes may be floating, or both integer.
1346 UNSIGNEDP is nonzero if X is an unsigned value.
1348 This can be done by referring to a part of X in place
1349 or by copying to a new temporary with conversion.
1351 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1353 This function *must not* call protect_from_queue
1354 except when putting X into an insn (in which case convert_move does it). */
1357 convert_modes (mode, oldmode, x, unsignedp)
1358 enum machine_mode mode, oldmode;
1364 /* If FROM is a SUBREG that indicates that we have already done at least
1365 the required extension, strip it. */
1367 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1368 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1369 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1370 x = gen_lowpart (mode, x);
1372 if (GET_MODE (x) != VOIDmode)
1373 oldmode = GET_MODE (x);
1375 if (mode == oldmode)
1378 /* There is one case that we must handle specially: If we are converting
1379 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1380 we are to interpret the constant as unsigned, gen_lowpart will do
1381 the wrong if the constant appears negative. What we want to do is
1382 make the high-order word of the constant zero, not all ones. */
1384 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1385 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1386 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1388 HOST_WIDE_INT val = INTVAL (x);
1390 if (oldmode != VOIDmode
1391 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1393 int width = GET_MODE_BITSIZE (oldmode);
1395 /* We need to zero extend VAL. */
1396 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1399 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1402 /* We can do this with a gen_lowpart if both desired and current modes
1403 are integer, and this is either a constant integer, a register, or a
1404 non-volatile MEM. Except for the constant case where MODE is no
1405 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1407 if ((GET_CODE (x) == CONST_INT
1408 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1409 || (GET_MODE_CLASS (mode) == MODE_INT
1410 && GET_MODE_CLASS (oldmode) == MODE_INT
1411 && (GET_CODE (x) == CONST_DOUBLE
1412 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1413 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1414 && direct_load[(int) mode])
1415 || (GET_CODE (x) == REG
1416 && (! HARD_REGISTER_P (x)
1417 || HARD_REGNO_MODE_OK (REGNO (x), mode))
1418 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1419 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1421 /* ?? If we don't know OLDMODE, we have to assume here that
1422 X does not need sign- or zero-extension. This may not be
1423 the case, but it's the best we can do. */
1424 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1425 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1427 HOST_WIDE_INT val = INTVAL (x);
1428 int width = GET_MODE_BITSIZE (oldmode);
1430 /* We must sign or zero-extend in this case. Start by
1431 zero-extending, then sign extend if we need to. */
1432 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1434 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1435 val |= (HOST_WIDE_INT) (-1) << width;
1437 return gen_int_mode (val, mode);
1440 return gen_lowpart (mode, x);
1443 temp = gen_reg_rtx (mode);
1444 convert_move (temp, x, unsignedp);
1448 /* This macro is used to determine what the largest unit size that
1449 move_by_pieces can use is. */
1451 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1452 move efficiently, as opposed to MOVE_MAX which is the maximum
1453 number of bytes we can move with a single instruction. */
1455 #ifndef MOVE_MAX_PIECES
1456 #define MOVE_MAX_PIECES MOVE_MAX
1459 /* STORE_MAX_PIECES is the number of bytes at a time that we can
1460 store efficiently. Due to internal GCC limitations, this is
1461 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
1462 for an immediate constant. */
1464 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
1466 /* Determine whether the LEN bytes can be moved by using several move
1467 instructions. Return nonzero if a call to move_by_pieces should
1471 can_move_by_pieces (len, align)
1472 unsigned HOST_WIDE_INT len;
1475 return MOVE_BY_PIECES_P (len, align);
1478 /* Generate several move instructions to copy LEN bytes from block FROM to
1479 block TO. (These are MEM rtx's with BLKmode). The caller must pass FROM
1480 and TO through protect_from_queue before calling.
1482 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1483 used to push FROM to the stack.
1485 ALIGN is maximum stack alignment we can assume.
1487 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
1488 mempcpy, and if ENDP is 2 return memory the end minus one byte ala
1492 move_by_pieces (to, from, len, align, endp)
1494 unsigned HOST_WIDE_INT len;
1498 struct move_by_pieces data;
1499 rtx to_addr, from_addr = XEXP (from, 0);
1500 unsigned int max_size = MOVE_MAX_PIECES + 1;
1501 enum machine_mode mode = VOIDmode, tmode;
1502 enum insn_code icode;
1504 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
1507 data.from_addr = from_addr;
1510 to_addr = XEXP (to, 0);
1513 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1514 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1516 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1523 #ifdef STACK_GROWS_DOWNWARD
1529 data.to_addr = to_addr;
1532 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1533 || GET_CODE (from_addr) == POST_INC
1534 || GET_CODE (from_addr) == POST_DEC);
1536 data.explicit_inc_from = 0;
1537 data.explicit_inc_to = 0;
1538 if (data.reverse) data.offset = len;
1541 /* If copying requires more than two move insns,
1542 copy addresses to registers (to make displacements shorter)
1543 and use post-increment if available. */
1544 if (!(data.autinc_from && data.autinc_to)
1545 && move_by_pieces_ninsns (len, align) > 2)
1547 /* Find the mode of the largest move... */
1548 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1549 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1550 if (GET_MODE_SIZE (tmode) < max_size)
1553 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1555 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1556 data.autinc_from = 1;
1557 data.explicit_inc_from = -1;
1559 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1561 data.from_addr = copy_addr_to_reg (from_addr);
1562 data.autinc_from = 1;
1563 data.explicit_inc_from = 1;
1565 if (!data.autinc_from && CONSTANT_P (from_addr))
1566 data.from_addr = copy_addr_to_reg (from_addr);
1567 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1569 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1571 data.explicit_inc_to = -1;
1573 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1575 data.to_addr = copy_addr_to_reg (to_addr);
1577 data.explicit_inc_to = 1;
1579 if (!data.autinc_to && CONSTANT_P (to_addr))
1580 data.to_addr = copy_addr_to_reg (to_addr);
1583 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1584 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1585 align = MOVE_MAX * BITS_PER_UNIT;
1587 /* First move what we can in the largest integer mode, then go to
1588 successively smaller modes. */
1590 while (max_size > 1)
1592 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1593 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1594 if (GET_MODE_SIZE (tmode) < max_size)
1597 if (mode == VOIDmode)
1600 icode = mov_optab->handlers[(int) mode].insn_code;
1601 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1602 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1604 max_size = GET_MODE_SIZE (mode);
1607 /* The code above should have handled everything. */
1621 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
1622 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
1624 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
1627 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
1634 to1 = adjust_address (data.to, QImode, data.offset);
1642 /* Return number of insns required to move L bytes by pieces.
1643 ALIGN (in bits) is maximum alignment we can assume. */
1645 static unsigned HOST_WIDE_INT
1646 move_by_pieces_ninsns (l, align)
1647 unsigned HOST_WIDE_INT l;
1650 unsigned HOST_WIDE_INT n_insns = 0;
1651 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1653 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1654 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1655 align = MOVE_MAX * BITS_PER_UNIT;
1657 while (max_size > 1)
1659 enum machine_mode mode = VOIDmode, tmode;
1660 enum insn_code icode;
1662 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1663 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1664 if (GET_MODE_SIZE (tmode) < max_size)
1667 if (mode == VOIDmode)
1670 icode = mov_optab->handlers[(int) mode].insn_code;
1671 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1672 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1674 max_size = GET_MODE_SIZE (mode);
1682 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1683 with move instructions for mode MODE. GENFUN is the gen_... function
1684 to make a move insn for that mode. DATA has all the other info. */
1687 move_by_pieces_1 (genfun, mode, data)
1688 rtx (*genfun) PARAMS ((rtx, ...));
1689 enum machine_mode mode;
1690 struct move_by_pieces *data;
1692 unsigned int size = GET_MODE_SIZE (mode);
1693 rtx to1 = NULL_RTX, from1;
1695 while (data->len >= size)
1698 data->offset -= size;
1702 if (data->autinc_to)
1703 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1706 to1 = adjust_address (data->to, mode, data->offset);
1709 if (data->autinc_from)
1710 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1713 from1 = adjust_address (data->from, mode, data->offset);
1715 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1716 emit_insn (gen_add2_insn (data->to_addr,
1717 GEN_INT (-(HOST_WIDE_INT)size)));
1718 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1719 emit_insn (gen_add2_insn (data->from_addr,
1720 GEN_INT (-(HOST_WIDE_INT)size)));
1723 emit_insn ((*genfun) (to1, from1));
1726 #ifdef PUSH_ROUNDING
1727 emit_single_push_insn (mode, from1, NULL);
1733 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1734 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1735 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1736 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1738 if (! data->reverse)
1739 data->offset += size;
1745 /* Emit code to move a block Y to a block X. This may be done with
1746 string-move instructions, with multiple scalar move instructions,
1747 or with a library call.
1749 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1750 SIZE is an rtx that says how long they are.
1751 ALIGN is the maximum alignment we can assume they have.
1752 METHOD describes what kind of copy this is, and what mechanisms may be used.
1754 Return the address of the new block, if memcpy is called and returns it,
1758 emit_block_move (x, y, size, method)
1760 enum block_op_methods method;
1768 case BLOCK_OP_NORMAL:
1769 may_use_call = true;
1772 case BLOCK_OP_CALL_PARM:
1773 may_use_call = block_move_libcall_safe_for_call_parm ();
1775 /* Make inhibit_defer_pop nonzero around the library call
1776 to force it to pop the arguments right away. */
1780 case BLOCK_OP_NO_LIBCALL:
1781 may_use_call = false;
1788 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1790 if (GET_MODE (x) != BLKmode)
1792 if (GET_MODE (y) != BLKmode)
1795 x = protect_from_queue (x, 1);
1796 y = protect_from_queue (y, 0);
1797 size = protect_from_queue (size, 0);
1799 if (GET_CODE (x) != MEM)
1801 if (GET_CODE (y) != MEM)
1806 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1807 can be incorrect is coming from __builtin_memcpy. */
1808 if (GET_CODE (size) == CONST_INT)
1810 x = shallow_copy_rtx (x);
1811 y = shallow_copy_rtx (y);
1812 set_mem_size (x, size);
1813 set_mem_size (y, size);
1816 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1817 move_by_pieces (x, y, INTVAL (size), align, 0);
1818 else if (emit_block_move_via_movstr (x, y, size, align))
1820 else if (may_use_call)
1821 retval = emit_block_move_via_libcall (x, y, size);
1823 emit_block_move_via_loop (x, y, size, align);
1825 if (method == BLOCK_OP_CALL_PARM)
1831 /* A subroutine of emit_block_move. Returns true if calling the
1832 block move libcall will not clobber any parameters which may have
1833 already been placed on the stack. */
1836 block_move_libcall_safe_for_call_parm ()
1842 /* Check to see whether memcpy takes all register arguments. */
1844 takes_regs_uninit, takes_regs_no, takes_regs_yes
1845 } takes_regs = takes_regs_uninit;
1849 case takes_regs_uninit:
1851 CUMULATIVE_ARGS args_so_far;
1854 fn = emit_block_move_libcall_fn (false);
1855 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0);
1857 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1858 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1860 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1861 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1862 if (!tmp || !REG_P (tmp))
1863 goto fail_takes_regs;
1864 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1865 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
1867 goto fail_takes_regs;
1869 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1872 takes_regs = takes_regs_yes;
1875 case takes_regs_yes:
1879 takes_regs = takes_regs_no;
1890 /* A subroutine of emit_block_move. Expand a movstr pattern;
1891 return true if successful. */
1894 emit_block_move_via_movstr (x, y, size, align)
1898 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1899 enum machine_mode mode;
1901 /* Since this is a move insn, we don't care about volatility. */
1904 /* Try the most limited insn first, because there's no point
1905 including more than one in the machine description unless
1906 the more limited one has some advantage. */
1908 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1909 mode = GET_MODE_WIDER_MODE (mode))
1911 enum insn_code code = movstr_optab[(int) mode];
1912 insn_operand_predicate_fn pred;
1914 if (code != CODE_FOR_nothing
1915 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1916 here because if SIZE is less than the mode mask, as it is
1917 returned by the macro, it will definitely be less than the
1918 actual mode mask. */
1919 && ((GET_CODE (size) == CONST_INT
1920 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1921 <= (GET_MODE_MASK (mode) >> 1)))
1922 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1923 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1924 || (*pred) (x, BLKmode))
1925 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1926 || (*pred) (y, BLKmode))
1927 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1928 || (*pred) (opalign, VOIDmode)))
1931 rtx last = get_last_insn ();
1934 op2 = convert_to_mode (mode, size, 1);
1935 pred = insn_data[(int) code].operand[2].predicate;
1936 if (pred != 0 && ! (*pred) (op2, mode))
1937 op2 = copy_to_mode_reg (mode, op2);
1939 /* ??? When called via emit_block_move_for_call, it'd be
1940 nice if there were some way to inform the backend, so
1941 that it doesn't fail the expansion because it thinks
1942 emitting the libcall would be more efficient. */
1944 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1952 delete_insns_since (last);
1960 /* A subroutine of emit_block_move. Expand a call to memcpy or bcopy.
1961 Return the return value from memcpy, 0 otherwise. */
1964 emit_block_move_via_libcall (dst, src, size)
1967 rtx dst_addr, src_addr;
1968 tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
1969 enum machine_mode size_mode;
1972 /* DST, SRC, or SIZE may have been passed through protect_from_queue.
1974 It is unsafe to save the value generated by protect_from_queue and reuse
1975 it later. Consider what happens if emit_queue is called before the
1976 return value from protect_from_queue is used.
1978 Expansion of the CALL_EXPR below will call emit_queue before we are
1979 finished emitting RTL for argument setup. So if we are not careful we
1980 could get the wrong value for an argument.
1982 To avoid this problem we go ahead and emit code to copy the addresses of
1983 DST and SRC and SIZE into new pseudos. We can then place those new
1984 pseudos into an RTL_EXPR and use them later, even after a call to
1987 Note this is not strictly needed for library calls since they do not call
1988 emit_queue before loading their arguments. However, we may need to have
1989 library calls call emit_queue in the future since failing to do so could
1990 cause problems for targets which define SMALL_REGISTER_CLASSES and pass
1991 arguments in registers. */
1993 dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1994 src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
1996 #ifdef POINTERS_EXTEND_UNSIGNED
1997 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1998 src_addr = convert_memory_address (ptr_mode, src_addr);
2001 dst_tree = make_tree (ptr_type_node, dst_addr);
2002 src_tree = make_tree (ptr_type_node, src_addr);
2004 if (TARGET_MEM_FUNCTIONS)
2005 size_mode = TYPE_MODE (sizetype);
2007 size_mode = TYPE_MODE (unsigned_type_node);
2009 size = convert_to_mode (size_mode, size, 1);
2010 size = copy_to_mode_reg (size_mode, size);
2012 /* It is incorrect to use the libcall calling conventions to call
2013 memcpy in this context. This could be a user call to memcpy and
2014 the user may wish to examine the return value from memcpy. For
2015 targets where libcalls and normal calls have different conventions
2016 for returning pointers, we could end up generating incorrect code.
2018 For convenience, we generate the call to bcopy this way as well. */
2020 if (TARGET_MEM_FUNCTIONS)
2021 size_tree = make_tree (sizetype, size);
2023 size_tree = make_tree (unsigned_type_node, size);
2025 fn = emit_block_move_libcall_fn (true);
2026 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2027 if (TARGET_MEM_FUNCTIONS)
2029 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
2030 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
2034 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
2035 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
2038 /* Now we have to build up the CALL_EXPR itself. */
2039 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2040 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2041 call_expr, arg_list, NULL_TREE);
2042 TREE_SIDE_EFFECTS (call_expr) = 1;
2044 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2046 /* If we are initializing a readonly value, show the above call clobbered
2047 it. Otherwise, a load from it may erroneously be hoisted from a loop, or
2048 the delay slot scheduler might overlook conflicts and take nasty
2050 if (RTX_UNCHANGING_P (dst))
2051 add_function_usage_to
2052 (last_call_insn (), gen_rtx_EXPR_LIST (VOIDmode,
2053 gen_rtx_CLOBBER (VOIDmode, dst),
2056 return TARGET_MEM_FUNCTIONS ? retval : NULL_RTX;
2059 /* A subroutine of emit_block_move_via_libcall. Create the tree node
2060 for the function we use for block copies. The first time FOR_CALL
2061 is true, we call assemble_external. */
2063 static GTY(()) tree block_move_fn;
2066 init_block_move_fn (asmspec)
2067 const char *asmspec;
2073 if (TARGET_MEM_FUNCTIONS)
2075 fn = get_identifier ("memcpy");
2076 args = build_function_type_list (ptr_type_node, ptr_type_node,
2077 const_ptr_type_node, sizetype,
2082 fn = get_identifier ("bcopy");
2083 args = build_function_type_list (void_type_node, const_ptr_type_node,
2084 ptr_type_node, unsigned_type_node,
2088 fn = build_decl (FUNCTION_DECL, fn, args);
2089 DECL_EXTERNAL (fn) = 1;
2090 TREE_PUBLIC (fn) = 1;
2091 DECL_ARTIFICIAL (fn) = 1;
2092 TREE_NOTHROW (fn) = 1;
2099 SET_DECL_RTL (block_move_fn, NULL_RTX);
2100 SET_DECL_ASSEMBLER_NAME (block_move_fn, get_identifier (asmspec));
2105 emit_block_move_libcall_fn (for_call)
2108 static bool emitted_extern;
2111 init_block_move_fn (NULL);
2113 if (for_call && !emitted_extern)
2115 emitted_extern = true;
2116 make_decl_rtl (block_move_fn, NULL);
2117 assemble_external (block_move_fn);
2120 return block_move_fn;
2123 /* A subroutine of emit_block_move. Copy the data via an explicit
2124 loop. This is used only when libcalls are forbidden. */
2125 /* ??? It'd be nice to copy in hunks larger than QImode. */
2128 emit_block_move_via_loop (x, y, size, align)
2130 unsigned int align ATTRIBUTE_UNUSED;
2132 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
2133 enum machine_mode iter_mode;
2135 iter_mode = GET_MODE (size);
2136 if (iter_mode == VOIDmode)
2137 iter_mode = word_mode;
2139 top_label = gen_label_rtx ();
2140 cmp_label = gen_label_rtx ();
2141 iter = gen_reg_rtx (iter_mode);
2143 emit_move_insn (iter, const0_rtx);
2145 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
2146 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
2147 do_pending_stack_adjust ();
2149 emit_note (NULL, NOTE_INSN_LOOP_BEG);
2151 emit_jump (cmp_label);
2152 emit_label (top_label);
2154 tmp = convert_modes (Pmode, iter_mode, iter, true);
2155 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
2156 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
2157 x = change_address (x, QImode, x_addr);
2158 y = change_address (y, QImode, y_addr);
2160 emit_move_insn (x, y);
2162 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
2163 true, OPTAB_LIB_WIDEN);
2165 emit_move_insn (iter, tmp);
2167 emit_note (NULL, NOTE_INSN_LOOP_CONT);
2168 emit_label (cmp_label);
2170 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
2173 emit_note (NULL, NOTE_INSN_LOOP_END);
2176 /* Copy all or part of a value X into registers starting at REGNO.
2177 The number of registers to be filled is NREGS. */
2180 move_block_to_reg (regno, x, nregs, mode)
2184 enum machine_mode mode;
2187 #ifdef HAVE_load_multiple
2195 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2196 x = validize_mem (force_const_mem (mode, x));
2198 /* See if the machine can do this with a load multiple insn. */
2199 #ifdef HAVE_load_multiple
2200 if (HAVE_load_multiple)
2202 last = get_last_insn ();
2203 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
2211 delete_insns_since (last);
2215 for (i = 0; i < nregs; i++)
2216 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
2217 operand_subword_force (x, i, mode));
2220 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
2221 The number of registers to be filled is NREGS. */
2224 move_block_from_reg (regno, x, nregs)
2234 /* See if the machine can do this with a store multiple insn. */
2235 #ifdef HAVE_store_multiple
2236 if (HAVE_store_multiple)
2238 rtx last = get_last_insn ();
2239 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
2247 delete_insns_since (last);
2251 for (i = 0; i < nregs; i++)
2253 rtx tem = operand_subword (x, i, 1, BLKmode);
2258 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
2262 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
2263 ORIG, where ORIG is a non-consecutive group of registers represented by
2264 a PARALLEL. The clone is identical to the original except in that the
2265 original set of registers is replaced by a new set of pseudo registers.
2266 The new set has the same modes as the original set. */
2269 gen_group_rtx (orig)
2275 if (GET_CODE (orig) != PARALLEL)
2278 length = XVECLEN (orig, 0);
2279 tmps = (rtx *) alloca (sizeof (rtx) * length);
2281 /* Skip a NULL entry in first slot. */
2282 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
2287 for (; i < length; i++)
2289 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
2290 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
2292 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
2295 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
2298 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
2299 registers represented by a PARALLEL. SSIZE represents the total size of
2300 block SRC in bytes, or -1 if not known. */
2301 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
2302 the balance will be in what would be the low-order memory addresses, i.e.
2303 left justified for big endian, right justified for little endian. This
2304 happens to be true for the targets currently using this support. If this
2305 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
2309 emit_group_load (dst, orig_src, ssize)
2316 if (GET_CODE (dst) != PARALLEL)
2319 /* Check for a NULL entry, used to indicate that the parameter goes
2320 both on the stack and in registers. */
2321 if (XEXP (XVECEXP (dst, 0, 0), 0))
2326 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
2328 /* Process the pieces. */
2329 for (i = start; i < XVECLEN (dst, 0); i++)
2331 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
2332 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
2333 unsigned int bytelen = GET_MODE_SIZE (mode);
2336 /* Handle trailing fragments that run over the size of the struct. */
2337 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2339 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2340 bytelen = ssize - bytepos;
2345 /* If we won't be loading directly from memory, protect the real source
2346 from strange tricks we might play; but make sure that the source can
2347 be loaded directly into the destination. */
2349 if (GET_CODE (orig_src) != MEM
2350 && (!CONSTANT_P (orig_src)
2351 || (GET_MODE (orig_src) != mode
2352 && GET_MODE (orig_src) != VOIDmode)))
2354 if (GET_MODE (orig_src) == VOIDmode)
2355 src = gen_reg_rtx (mode);
2357 src = gen_reg_rtx (GET_MODE (orig_src));
2359 emit_move_insn (src, orig_src);
2362 /* Optimize the access just a bit. */
2363 if (GET_CODE (src) == MEM
2364 && MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode)
2365 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2366 && bytelen == GET_MODE_SIZE (mode))
2368 tmps[i] = gen_reg_rtx (mode);
2369 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
2371 else if (GET_CODE (src) == CONCAT)
2373 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
2374 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
2376 if ((bytepos == 0 && bytelen == slen0)
2377 || (bytepos != 0 && bytepos + bytelen <= slen))
2379 /* The following assumes that the concatenated objects all
2380 have the same size. In this case, a simple calculation
2381 can be used to determine the object and the bit field
2383 tmps[i] = XEXP (src, bytepos / slen0);
2384 if (! CONSTANT_P (tmps[i])
2385 && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
2386 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
2387 (bytepos % slen0) * BITS_PER_UNIT,
2388 1, NULL_RTX, mode, mode, ssize);
2390 else if (bytepos == 0)
2392 rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
2393 emit_move_insn (mem, src);
2394 tmps[i] = adjust_address (mem, mode, 0);
2399 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
2400 SIMD register, which is currently broken. While we get GCC
2401 to emit proper RTL for these cases, let's dump to memory. */
2402 else if (VECTOR_MODE_P (GET_MODE (dst))
2403 && GET_CODE (src) == REG)
2405 int slen = GET_MODE_SIZE (GET_MODE (src));
2408 mem = assign_stack_temp (GET_MODE (src), slen, 0);
2409 emit_move_insn (mem, src);
2410 tmps[i] = adjust_address (mem, mode, (int) bytepos);
2412 else if (CONSTANT_P (src)
2413 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2416 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2417 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2420 if (BYTES_BIG_ENDIAN && shift)
2421 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2422 tmps[i], 0, OPTAB_WIDEN);
2427 /* Copy the extracted pieces into the proper (probable) hard regs. */
2428 for (i = start; i < XVECLEN (dst, 0); i++)
2429 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
2432 /* Emit code to move a block SRC to block DST, where SRC and DST are
2433 non-consecutive groups of registers, each represented by a PARALLEL. */
2436 emit_group_move (dst, src)
2441 if (GET_CODE (src) != PARALLEL
2442 || GET_CODE (dst) != PARALLEL
2443 || XVECLEN (src, 0) != XVECLEN (dst, 0))
2446 /* Skip first entry if NULL. */
2447 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
2448 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
2449 XEXP (XVECEXP (src, 0, i), 0));
2452 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2453 registers represented by a PARALLEL. SSIZE represents the total size of
2454 block DST, or -1 if not known. */
2457 emit_group_store (orig_dst, src, ssize)
2464 if (GET_CODE (src) != PARALLEL)
2467 /* Check for a NULL entry, used to indicate that the parameter goes
2468 both on the stack and in registers. */
2469 if (XEXP (XVECEXP (src, 0, 0), 0))
2474 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2476 /* Copy the (probable) hard regs into pseudos. */
2477 for (i = start; i < XVECLEN (src, 0); i++)
2479 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2480 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2481 emit_move_insn (tmps[i], reg);
2485 /* If we won't be storing directly into memory, protect the real destination
2486 from strange tricks we might play. */
2488 if (GET_CODE (dst) == PARALLEL)
2492 /* We can get a PARALLEL dst if there is a conditional expression in
2493 a return statement. In that case, the dst and src are the same,
2494 so no action is necessary. */
2495 if (rtx_equal_p (dst, src))
2498 /* It is unclear if we can ever reach here, but we may as well handle
2499 it. Allocate a temporary, and split this into a store/load to/from
2502 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2503 emit_group_store (temp, src, ssize);
2504 emit_group_load (dst, temp, ssize);
2507 else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
2509 dst = gen_reg_rtx (GET_MODE (orig_dst));
2510 /* Make life a bit easier for combine. */
2511 emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
2514 /* Process the pieces. */
2515 for (i = start; i < XVECLEN (src, 0); i++)
2517 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2518 enum machine_mode mode = GET_MODE (tmps[i]);
2519 unsigned int bytelen = GET_MODE_SIZE (mode);
2522 /* Handle trailing fragments that run over the size of the struct. */
2523 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2525 if (BYTES_BIG_ENDIAN)
2527 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2528 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2529 tmps[i], 0, OPTAB_WIDEN);
2531 bytelen = ssize - bytepos;
2534 if (GET_CODE (dst) == CONCAT)
2536 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2537 dest = XEXP (dst, 0);
2538 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2540 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2541 dest = XEXP (dst, 1);
2543 else if (bytepos == 0 && XVECLEN (src, 0))
2545 dest = assign_stack_temp (GET_MODE (dest),
2546 GET_MODE_SIZE (GET_MODE (dest)), 0);
2547 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2556 /* Optimize the access just a bit. */
2557 if (GET_CODE (dest) == MEM
2558 && MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode)
2559 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2560 && bytelen == GET_MODE_SIZE (mode))
2561 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2563 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2564 mode, tmps[i], ssize);
2569 /* Copy from the pseudo into the (probable) hard reg. */
2570 if (orig_dst != dst)
2571 emit_move_insn (orig_dst, dst);
2574 /* Generate code to copy a BLKmode object of TYPE out of a
2575 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2576 is null, a stack temporary is created. TGTBLK is returned.
2578 The primary purpose of this routine is to handle functions
2579 that return BLKmode structures in registers. Some machines
2580 (the PA for example) want to return all small structures
2581 in registers regardless of the structure's alignment. */
2584 copy_blkmode_from_reg (tgtblk, srcreg, type)
2589 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2590 rtx src = NULL, dst = NULL;
2591 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2592 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2596 tgtblk = assign_temp (build_qualified_type (type,
2598 | TYPE_QUAL_CONST)),
2600 preserve_temp_slots (tgtblk);
2603 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2604 into a new pseudo which is a full word. */
2606 if (GET_MODE (srcreg) != BLKmode
2607 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2608 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2610 /* Structures whose size is not a multiple of a word are aligned
2611 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2612 machine, this means we must skip the empty high order bytes when
2613 calculating the bit offset. */
2614 if (BYTES_BIG_ENDIAN
2615 && bytes % UNITS_PER_WORD)
2616 big_endian_correction
2617 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
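  /* For example (illustrative numbers): with BITS_PER_WORD == 32,
     UNITS_PER_WORD == 4 and a 6-byte structure, bytes % UNITS_PER_WORD
     is 2, so big_endian_correction = 32 - 2 * 8 = 16; extraction then
     starts 16 bits into the first source word, skipping the unused
     high-order bytes.  */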
2619 /* Copy the structure BITSIZE bits at a time.
2621 We could probably emit more efficient code for machines which do not use
2622 strict alignment, but it doesn't seem worth the effort at the current
2624 for (bitpos = 0, xbitpos = big_endian_correction;
2625 bitpos < bytes * BITS_PER_UNIT;
2626 bitpos += bitsize, xbitpos += bitsize)
2628 /* We need a new source operand each time xbitpos is on a
2629 word boundary and when xbitpos == big_endian_correction
2630 (the first time through). */
2631 if (xbitpos % BITS_PER_WORD == 0
2632 || xbitpos == big_endian_correction)
2633 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2636 /* We need a new destination operand each time bitpos is on
2638 if (bitpos % BITS_PER_WORD == 0)
2639 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2641 /* Use xbitpos for the source extraction (right justified) and
2642 bitpos for the destination store (left justified). */
2643 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2644 extract_bit_field (src, bitsize,
2645 xbitpos % BITS_PER_WORD, 1,
2646 NULL_RTX, word_mode, word_mode,
2654 /* Add a USE expression for REG to the (possibly empty) list pointed
2655 to by CALL_FUSAGE. REG must denote a hard register. */
2658 use_reg (call_fusage, reg)
2659 rtx *call_fusage, reg;
2661 if (GET_CODE (reg) != REG
2662 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2666 = gen_rtx_EXPR_LIST (VOIDmode,
2667 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2670 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2671 starting at REGNO. All of these registers must be hard registers. */
2674 use_regs (call_fusage, regno, nregs)
2681 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2684 for (i = 0; i < nregs; i++)
2685 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2688 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2689 PARALLEL REGS. This is for calls that pass values in multiple
2690 non-contiguous locations. The Irix 6 ABI has examples of this. */
2693 use_group_regs (call_fusage, regs)
2699 for (i = 0; i < XVECLEN (regs, 0); i++)
2701 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2703 /* A NULL entry means the parameter goes both on the stack and in
2704 registers. This can also be a MEM for targets that pass values
2705 partially on the stack and partially in registers. */
2706 if (reg != 0 && GET_CODE (reg) == REG)
2707 use_reg (call_fusage, reg);
2712 /* Determine whether the LEN bytes generated by CONSTFUN can be
2713 stored to memory using several move instructions. CONSTFUNDATA is
2714 a pointer which will be passed as argument in every CONSTFUN call.
2715 ALIGN is maximum alignment we can assume. Return nonzero if a
2716 call to store_by_pieces should succeed. */
2719 can_store_by_pieces (len, constfun, constfundata, align)
2720 unsigned HOST_WIDE_INT len;
2721 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2725 unsigned HOST_WIDE_INT max_size, l;
2726 HOST_WIDE_INT offset = 0;
2727 enum machine_mode mode, tmode;
2728 enum insn_code icode;
2732 if (! STORE_BY_PIECES_P (len, align))
2735 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2736 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2737 align = MOVE_MAX * BITS_PER_UNIT;
2739 /* We would first store what we can in the largest integer mode, then go to
2740 successively smaller modes. */
2743 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2748 max_size = STORE_MAX_PIECES + 1;
2749 while (max_size > 1)
2751 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2752 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2753 if (GET_MODE_SIZE (tmode) < max_size)
2756 if (mode == VOIDmode)
2759 icode = mov_optab->handlers[(int) mode].insn_code;
2760 if (icode != CODE_FOR_nothing
2761 && align >= GET_MODE_ALIGNMENT (mode))
2763 unsigned int size = GET_MODE_SIZE (mode);
2770 cst = (*constfun) (constfundata, offset, mode);
2771 if (!LEGITIMATE_CONSTANT_P (cst))
2781 max_size = GET_MODE_SIZE (mode);
2784 /* The code above should have handled everything. */
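/* A minimal illustrative CONSTFUN callback (not part of the original
   file), in the style of the memcpy expanders in builtins.c; it
   assumes c_readstr is in scope and that CONSTFUNDATA points at the
   source bytes.  */

static rtx
example_read_str (data, offset, mode)
     PTR data;
     HOST_WIDE_INT offset;
     enum machine_mode mode;
{
  /* Build a constant of mode MODE from the bytes at DATA + OFFSET.  */
  return c_readstr ((const char *) data + offset, mode);
}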
2792 /* Generate several move instructions to store LEN bytes generated by
2793 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2794 pointer which will be passed as argument in every CONSTFUN call.
2795 ALIGN is maximum alignment we can assume.
2796 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
2797 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
2801 store_by_pieces (to, len, constfun, constfundata, align, endp)
2803 unsigned HOST_WIDE_INT len;
2804 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2809 struct store_by_pieces data;
2811 if (! STORE_BY_PIECES_P (len, align))
2813 to = protect_from_queue (to, 1);
2814 data.constfun = constfun;
2815 data.constfundata = constfundata;
2818 store_by_pieces_1 (&data, align);
2829 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2830 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2832 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2835 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2842 to1 = adjust_address (data.to, QImode, data.offset);
2850 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2851 rtx with BLKmode). The caller must pass TO through protect_from_queue
2852 before calling. ALIGN is maximum alignment we can assume. */
2855 clear_by_pieces (to, len, align)
2857 unsigned HOST_WIDE_INT len;
2860 struct store_by_pieces data;
2862 data.constfun = clear_by_pieces_1;
2863 data.constfundata = NULL;
2866 store_by_pieces_1 (&data, align);
2869 /* Callback routine for clear_by_pieces.
2870 Return const0_rtx unconditionally. */
2873 clear_by_pieces_1 (data, offset, mode)
2874 PTR data ATTRIBUTE_UNUSED;
2875 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2876 enum machine_mode mode ATTRIBUTE_UNUSED;
2881 /* Subroutine of clear_by_pieces and store_by_pieces.
2882 Generate several move instructions to store LEN bytes of block TO. (A MEM
2883 rtx with BLKmode). The caller must pass TO through protect_from_queue
2884 before calling. ALIGN is maximum alignment we can assume. */
2887 store_by_pieces_1 (data, align)
2888 struct store_by_pieces *data;
2891 rtx to_addr = XEXP (data->to, 0);
2892 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
2893 enum machine_mode mode = VOIDmode, tmode;
2894 enum insn_code icode;
2897 data->to_addr = to_addr;
2899 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2900 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2902 data->explicit_inc_to = 0;
2904 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2906 data->offset = data->len;
2908 /* If storing requires more than two move insns,
2909 copy addresses to registers (to make displacements shorter)
2910 and use post-increment if available. */
2911 if (!data->autinc_to
2912 && move_by_pieces_ninsns (data->len, align) > 2)
2914 /* Determine the main mode we'll be using. */
2915 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2916 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2917 if (GET_MODE_SIZE (tmode) < max_size)
2920 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2922 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2923 data->autinc_to = 1;
2924 data->explicit_inc_to = -1;
2927 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2928 && ! data->autinc_to)
2930 data->to_addr = copy_addr_to_reg (to_addr);
2931 data->autinc_to = 1;
2932 data->explicit_inc_to = 1;
2935 if (!data->autinc_to && CONSTANT_P (to_addr))
2936 data->to_addr = copy_addr_to_reg (to_addr);
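  /* Illustrative setup (target-dependent): for a 16-byte reverse store
     with USE_STORE_PRE_DECREMENT true, TO_ADDR is primed at TO + 16
     and explicit_inc_to == -1 makes store_by_pieces_2 emit an explicit
     add of -size before each store; a forward store with
     USE_STORE_POST_INCREMENT instead starts at TO and bumps the
     address register after each store.  */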
2939 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2940 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2941 align = MOVE_MAX * BITS_PER_UNIT;
2943 /* First store what we can in the largest integer mode, then go to
2944 successively smaller modes. */
2946 while (max_size > 1)
2948 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2949 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2950 if (GET_MODE_SIZE (tmode) < max_size)
2953 if (mode == VOIDmode)
2956 icode = mov_optab->handlers[(int) mode].insn_code;
2957 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2958 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2960 max_size = GET_MODE_SIZE (mode);
2963 /* The code above should have handled everything. */
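/* Illustrative walk of the loop above (assuming the usual QI/HI/SI/DI
   integer modes and STORE_MAX_PIECES == 8): the first iteration picks
   DImode and emits 8-byte stores while LEN >= 8, the next picks SImode
   for a remaining 4..7 bytes, and so on down to QImode, so 15 bytes
   are stored as 8 + 4 + 2 + 1 byte pieces.  */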
2968 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2969 with move instructions for mode MODE. GENFUN is the gen_... function
2970 to make a move insn for that mode. DATA has all the other info. */
2973 store_by_pieces_2 (genfun, mode, data)
2974 rtx (*genfun) PARAMS ((rtx, ...));
2975 enum machine_mode mode;
2976 struct store_by_pieces *data;
2978 unsigned int size = GET_MODE_SIZE (mode);
2981 while (data->len >= size)
2984 data->offset -= size;
2986 if (data->autinc_to)
2987 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2990 to1 = adjust_address (data->to, mode, data->offset);
2992 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2993 emit_insn (gen_add2_insn (data->to_addr,
2994 GEN_INT (-(HOST_WIDE_INT) size)));
2996 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2997 emit_insn ((*genfun) (to1, cst));
2999 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
3000 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
3002 if (! data->reverse)
3003 data->offset += size;
3009 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
3010 its length in bytes. */
3013 clear_storage (object, size)
3018 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
3019 : GET_MODE_ALIGNMENT (GET_MODE (object)));
3021 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
3022 just move a zero. Otherwise, do this a piece at a time. */
3023 if (GET_MODE (object) != BLKmode
3024 && GET_CODE (size) == CONST_INT
3025 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
3026 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
3029 object = protect_from_queue (object, 1);
3030 size = protect_from_queue (size, 0);
3032 if (GET_CODE (size) == CONST_INT
3033 && CLEAR_BY_PIECES_P (INTVAL (size), align))
3034 clear_by_pieces (object, INTVAL (size), align);
3035 else if (clear_storage_via_clrstr (object, size, align))
3038 retval = clear_storage_via_libcall (object, size);
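/* Illustrative decision sequence (target-dependent): clearing a small
   aligned BLKmode object with a constant size such as (const_int 16)
   typically satisfies CLEAR_BY_PIECES_P and becomes a handful of
   integer-mode stores of zero; a variable or large size first tries
   the clrstr pattern, and only then falls back to the memset/bzero
   libcall.  */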
3044 /* A subroutine of clear_storage. Expand a clrstr pattern;
3045 return true if successful. */
3048 clear_storage_via_clrstr (object, size, align)
3052 /* Try the most limited insn first, because there's no point
3053 including more than one in the machine description unless
3054 the more limited one has some advantage. */
3056 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3057 enum machine_mode mode;
3059 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
3060 mode = GET_MODE_WIDER_MODE (mode))
3062 enum insn_code code = clrstr_optab[(int) mode];
3063 insn_operand_predicate_fn pred;
3065 if (code != CODE_FOR_nothing
3066 /* We don't need MODE to be narrower than
3067 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
3068 the mode mask, as it is returned by the macro, it will
3069 definitely be less than the actual mode mask. */
3070 && ((GET_CODE (size) == CONST_INT
3071 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3072 <= (GET_MODE_MASK (mode) >> 1)))
3073 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3074 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
3075 || (*pred) (object, BLKmode))
3076 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
3077 || (*pred) (opalign, VOIDmode)))
3080 rtx last = get_last_insn ();
3083 op1 = convert_to_mode (mode, size, 1);
3084 pred = insn_data[(int) code].operand[1].predicate;
3085 if (pred != 0 && ! (*pred) (op1, mode))
3086 op1 = copy_to_mode_reg (mode, op1);
3088 pat = GEN_FCN ((int) code) (object, op1, opalign);
3095 delete_insns_since (last);
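/* Illustrative walk (hypothetical clrstrqi/clrstrhi patterns): for a
   size of (const_int 200), 200 exceeds GET_MODE_MASK (QImode) >> 1
   == 127, so the QImode pattern is skipped and the HImode one is
   tried; if pattern generation or an operand predicate fails, the
   emitted insns are deleted and the next wider mode is tried.  */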
3102 /* A subroutine of clear_storage. Expand a call to memset or bzero.
3103 Return the return value of memset, 0 otherwise. */
3106 clear_storage_via_libcall (object, size)
3109 tree call_expr, arg_list, fn, object_tree, size_tree;
3110 enum machine_mode size_mode;
3113 /* OBJECT or SIZE may have been passed through protect_from_queue.
3115 It is unsafe to save the value generated by protect_from_queue
3116 and reuse it later. Consider what happens if emit_queue is
3117 called before the return value from protect_from_queue is used.
3119 Expansion of the CALL_EXPR below will call emit_queue before
3120 we are finished emitting RTL for argument setup. So if we are
3121 not careful we could get the wrong value for an argument.
3123 To avoid this problem we go ahead and emit code to copy OBJECT
3124 and SIZE into new pseudos. We can then place those new pseudos
3125 into an RTL_EXPR and use them later, even after a call to
3128 Note this is not strictly needed for library calls since they
3129 do not call emit_queue before loading their arguments. However,
3130 we may need to have library calls call emit_queue in the future
3131 since failing to do so could cause problems for targets which
3132 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
3134 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
3136 if (TARGET_MEM_FUNCTIONS)
3137 size_mode = TYPE_MODE (sizetype);
3139 size_mode = TYPE_MODE (unsigned_type_node);
3140 size = convert_to_mode (size_mode, size, 1);
3141 size = copy_to_mode_reg (size_mode, size);
3143 /* It is incorrect to use the libcall calling conventions to call
3144 memset in this context. This could be a user call to memset and
3145 the user may wish to examine the return value from memset. For
3146 targets where libcalls and normal calls have different conventions
3147 for returning pointers, we could end up generating incorrect code.
3149 For convenience, we generate the call to bzero this way as well. */
3151 object_tree = make_tree (ptr_type_node, object);
3152 if (TARGET_MEM_FUNCTIONS)
3153 size_tree = make_tree (sizetype, size);
3155 size_tree = make_tree (unsigned_type_node, size);
3157 fn = clear_storage_libcall_fn (true);
3158 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
3159 if (TARGET_MEM_FUNCTIONS)
3160 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
3161 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
3163 /* Now we have to build up the CALL_EXPR itself. */
3164 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
3165 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
3166 call_expr, arg_list, NULL_TREE);
3167 TREE_SIDE_EFFECTS (call_expr) = 1;
3169 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
3171 /* If we are initializing a readonly value, show the above call
3172 clobbered it. Otherwise, a load from it may erroneously be
3173 hoisted from a loop. */
3174 if (RTX_UNCHANGING_P (object))
3175 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
3177 return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
3180 /* A subroutine of clear_storage_via_libcall. Create the tree node
3181 for the function we use for block clears. The first time FOR_CALL
3182 is true, we call assemble_external. */
3184 static GTY(()) tree block_clear_fn;
3187 init_block_clear_fn (asmspec)
3188 const char *asmspec;
3190 if (!block_clear_fn)
3194 if (TARGET_MEM_FUNCTIONS)
3196 fn = get_identifier ("memset");
3197 args = build_function_type_list (ptr_type_node, ptr_type_node,
3198 integer_type_node, sizetype,
3203 fn = get_identifier ("bzero");
3204 args = build_function_type_list (void_type_node, ptr_type_node,
3205 unsigned_type_node, NULL_TREE);
3208 fn = build_decl (FUNCTION_DECL, fn, args);
3209 DECL_EXTERNAL (fn) = 1;
3210 TREE_PUBLIC (fn) = 1;
3211 DECL_ARTIFICIAL (fn) = 1;
3212 TREE_NOTHROW (fn) = 1;
3214 block_clear_fn = fn;
3219 SET_DECL_RTL (block_clear_fn, NULL_RTX);
3220 SET_DECL_ASSEMBLER_NAME (block_clear_fn, get_identifier (asmspec));
3225 clear_storage_libcall_fn (for_call)
3228 static bool emitted_extern;
3230 if (!block_clear_fn)
3231 init_block_clear_fn (NULL);
3233 if (for_call && !emitted_extern)
3235 emitted_extern = true;
3236 make_decl_rtl (block_clear_fn, NULL);
3237 assemble_external (block_clear_fn);
3240 return block_clear_fn;
3243 /* Generate code to copy Y into X.
3244 Both Y and X must have the same mode, except that
3245 Y can be a constant with VOIDmode.
3246 This mode cannot be BLKmode; use emit_block_move for that.
3248 Return the last instruction emitted. */
3251 emit_move_insn (x, y)
3254 enum machine_mode mode = GET_MODE (x);
3255 rtx y_cst = NULL_RTX;
3258 x = protect_from_queue (x, 1);
3259 y = protect_from_queue (y, 0);
3261 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
3264 /* Never force constant_p_rtx to memory. */
3265 if (GET_CODE (y) == CONSTANT_P_RTX)
3267 else if (CONSTANT_P (y))
3270 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3271 && (last_insn = compress_float_constant (x, y)))
3276 if (!LEGITIMATE_CONSTANT_P (y))
3278 y = force_const_mem (mode, y);
3280 /* If the target's cannot_force_const_mem prevented the spill,
3281 assume that the target's move expanders will also take care
3282 of the non-legitimate constant. */
3288 /* If X or Y are memory references, verify that their addresses are valid
3290 if (GET_CODE (x) == MEM
3291 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3292 && ! push_operand (x, GET_MODE (x)))
3294 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
3295 x = validize_mem (x);
3297 if (GET_CODE (y) == MEM
3298 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3300 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
3301 y = validize_mem (y);
3303 if (mode == BLKmode)
3306 last_insn = emit_move_insn_1 (x, y);
3308 if (y_cst && GET_CODE (x) == REG
3309 && (set = single_set (last_insn)) != NULL_RTX
3310 && SET_DEST (set) == x
3311 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3312 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
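/* Illustrative use (hypothetical SImode pseudo REG): emit_move_insn
   (reg, GEN_INT (42)) emits the move directly when the constant is
   LEGITIMATE_CONSTANT_P; otherwise the constant is spilled to the
   constant pool by force_const_mem above, and when the move's source
   no longer equals the constant a REG_EQUAL note recording the
   original value is attached to the final insn.  */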
3317 /* Low level part of emit_move_insn.
3318 Called just like emit_move_insn, but assumes X and Y
3319 are basically valid. */
3322 emit_move_insn_1 (x, y)
3325 enum machine_mode mode = GET_MODE (x);
3326 enum machine_mode submode;
3327 enum mode_class class = GET_MODE_CLASS (mode);
3329 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
3332 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
3334 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
3336 /* Expand complex moves by moving real part and imag part, if possible. */
3337 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
3338 && BLKmode != (submode = GET_MODE_INNER (mode))
3339 && (mov_optab->handlers[(int) submode].insn_code
3340 != CODE_FOR_nothing))
3342 /* Don't split destination if it is a stack push. */
3343 int stack = push_operand (x, GET_MODE (x));
3345 #ifdef PUSH_ROUNDING
3346 /* In case we output to the stack, but the size is smaller than the machine
3347 can push exactly, we need to use move instructions. */
3349 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
3350 != GET_MODE_SIZE (submode)))
3353 HOST_WIDE_INT offset1, offset2;
3355 /* Do not use anti_adjust_stack, since we don't want to update
3356 stack_pointer_delta. */
3357 temp = expand_binop (Pmode,
3358 #ifdef STACK_GROWS_DOWNWARD
3366 (GET_MODE_SIZE (GET_MODE (x)))),
3367 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3369 if (temp != stack_pointer_rtx)
3370 emit_move_insn (stack_pointer_rtx, temp);
3372 #ifdef STACK_GROWS_DOWNWARD
3374 offset2 = GET_MODE_SIZE (submode);
3376 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
3377 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
3378 + GET_MODE_SIZE (submode));
3381 emit_move_insn (change_address (x, submode,
3382 gen_rtx_PLUS (Pmode,
3384 GEN_INT (offset1))),
3385 gen_realpart (submode, y));
3386 emit_move_insn (change_address (x, submode,
3387 gen_rtx_PLUS (Pmode,
3389 GEN_INT (offset2))),
3390 gen_imagpart (submode, y));
3394 /* If this is a stack push, push the highpart first, so it
3395 will be in the argument order.
3397 In that case, change_address is used only to convert
3398 the mode, not to change the address. */
3401 /* Note that the real part always precedes the imag part in memory
3402 regardless of machine's endianness. */
3403 #ifdef STACK_GROWS_DOWNWARD
3404 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3405 gen_imagpart (submode, y));
3406 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3407 gen_realpart (submode, y));
3409 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3410 gen_realpart (submode, y));
3411 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3412 gen_imagpart (submode, y));
3417 rtx realpart_x, realpart_y;
3418 rtx imagpart_x, imagpart_y;
3420 /* If this is a complex value with each part being smaller than a
3421 word, the usual calling sequence will likely pack the pieces into
3422 a single register. Unfortunately, SUBREG of hard registers only
3423 deals in terms of words, so we have a problem converting input
3424 arguments to the CONCAT of two registers that is used elsewhere
3425 for complex values. If this is before reload, we can copy it into
3426 memory and reload. FIXME, we should see about using extract and
3427 insert on integer registers, but complex short and complex char
3428 variables should be rarely used. */
3429 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
3430 && (reload_in_progress | reload_completed) == 0)
3433 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
3435 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
3437 if (packed_dest_p || packed_src_p)
3439 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
3440 ? MODE_FLOAT : MODE_INT);
3442 enum machine_mode reg_mode
3443 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
3445 if (reg_mode != BLKmode)
3447 rtx mem = assign_stack_temp (reg_mode,
3448 GET_MODE_SIZE (mode), 0);
3449 rtx cmem = adjust_address (mem, mode, 0);
3452 = N_("function using short complex types cannot be inline");
3456 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
3458 emit_move_insn_1 (cmem, y);
3459 return emit_move_insn_1 (sreg, mem);
3463 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
3465 emit_move_insn_1 (mem, sreg);
3466 return emit_move_insn_1 (x, cmem);
3472 realpart_x = gen_realpart (submode, x);
3473 realpart_y = gen_realpart (submode, y);
3474 imagpart_x = gen_imagpart (submode, x);
3475 imagpart_y = gen_imagpart (submode, y);
3477 /* Show the output dies here. This is necessary for SUBREGs
3478 of pseudos since we cannot track their lifetimes correctly;
3479 hard regs shouldn't appear here except as return values.
3480 We never want to emit such a clobber after reload. */
3482 && ! (reload_in_progress || reload_completed)
3483 && (GET_CODE (realpart_x) == SUBREG
3484 || GET_CODE (imagpart_x) == SUBREG))
3485 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3487 emit_move_insn (realpart_x, realpart_y);
3488 emit_move_insn (imagpart_x, imagpart_y);
3491 return get_last_insn ();
3494 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
3495 find a mode to do it in. If we have a movcc, use it. Otherwise,
3496 find the MODE_INT mode of the same width. */
3497 else if (GET_MODE_CLASS (mode) == MODE_CC
3498 && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
3500 enum insn_code insn_code;
3501 enum machine_mode tmode = VOIDmode;
3505 && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
3508 for (tmode = QImode; tmode != VOIDmode;
3509 tmode = GET_MODE_WIDER_MODE (tmode))
3510 if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
3513 if (tmode == VOIDmode)
3516 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
3517 may call change_address which is not appropriate if we were
3518 called when a reload was in progress. We don't have to worry
3519 about changing the address since the size in bytes is supposed to
3520 be the same. Copy the MEM to change the mode and move any
3521 substitutions from the old MEM to the new one. */
3523 if (reload_in_progress)
3525 x = gen_lowpart_common (tmode, x1);
3526 if (x == 0 && GET_CODE (x1) == MEM)
3528 x = adjust_address_nv (x1, tmode, 0);
3529 copy_replacements (x1, x);
3532 y = gen_lowpart_common (tmode, y1);
3533 if (y == 0 && GET_CODE (y1) == MEM)
3535 y = adjust_address_nv (y1, tmode, 0);
3536 copy_replacements (y1, y);
3541 x = gen_lowpart (tmode, x);
3542 y = gen_lowpart (tmode, y);
3545 insn_code = mov_optab->handlers[(int) tmode].insn_code;
3546 return emit_insn (GEN_FCN (insn_code) (x, y));
3549 /* This will handle any multi-word or full-word mode that lacks a move_insn
3550 pattern. However, you will get better code if you define such patterns,
3551 even if they must turn into multiple assembler instructions. */
3552 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
3559 #ifdef PUSH_ROUNDING
3561 /* If X is a push on the stack, do the push now and replace
3562 X with a reference to the stack pointer. */
3563 if (push_operand (x, GET_MODE (x)))
3568 /* Do not use anti_adjust_stack, since we don't want to update
3569 stack_pointer_delta. */
3570 temp = expand_binop (Pmode,
3571 #ifdef STACK_GROWS_DOWNWARD
3579 (GET_MODE_SIZE (GET_MODE (x)))),
3580 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3582 if (temp != stack_pointer_rtx)
3583 emit_move_insn (stack_pointer_rtx, temp);
3585 code = GET_CODE (XEXP (x, 0));
3587 /* Just hope that small offsets off SP are OK. */
3588 if (code == POST_INC)
3589 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3590 GEN_INT (-((HOST_WIDE_INT)
3591 GET_MODE_SIZE (GET_MODE (x)))));
3592 else if (code == POST_DEC)
3593 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3594 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3596 temp = stack_pointer_rtx;
3598 x = change_address (x, VOIDmode, temp);
3602 /* If we are in reload, see if either operand is a MEM whose address
3603 is scheduled for replacement. */
3604 if (reload_in_progress && GET_CODE (x) == MEM
3605 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3606 x = replace_equiv_address_nv (x, inner);
3607 if (reload_in_progress && GET_CODE (y) == MEM
3608 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3609 y = replace_equiv_address_nv (y, inner);
3615 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3618 rtx xpart = operand_subword (x, i, 1, mode);
3619 rtx ypart = operand_subword (y, i, 1, mode);
3621 /* If we can't get a part of Y, put Y into memory if it is a
3622 constant. Otherwise, force it into a register. If we still
3623 can't get a part of Y, abort. */
3624 if (ypart == 0 && CONSTANT_P (y))
3626 y = force_const_mem (mode, y);
3627 ypart = operand_subword (y, i, 1, mode);
3629 else if (ypart == 0)
3630 ypart = operand_subword_force (y, i, mode);
3632 if (xpart == 0 || ypart == 0)
3635 need_clobber |= (GET_CODE (xpart) == SUBREG);
3637 last_insn = emit_move_insn (xpart, ypart);
3643 /* Show the output dies here. This is necessary for SUBREGs
3644 of pseudos since we cannot track their lifetimes correctly;
3645 hard regs shouldn't appear here except as return values.
3646 We never want to emit such a clobber after reload. */
3648 && ! (reload_in_progress || reload_completed)
3649 && need_clobber != 0)
3650 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3660 /* If Y is representable exactly in a narrower mode, and the target can
3661 perform the extension directly from constant or memory, then emit the
3662 move as an extension. */
3665 compress_float_constant (x, y)
3668 enum machine_mode dstmode = GET_MODE (x);
3669 enum machine_mode orig_srcmode = GET_MODE (y);
3670 enum machine_mode srcmode;
3673 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3675 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3676 srcmode != orig_srcmode;
3677 srcmode = GET_MODE_WIDER_MODE (srcmode))
3680 rtx trunc_y, last_insn;
3682 /* Skip if the target can't extend this way. */
3683 ic = can_extend_p (dstmode, srcmode, 0);
3684 if (ic == CODE_FOR_nothing)
3687 /* Skip if the narrowed value isn't exact. */
3688 if (! exact_real_truncate (srcmode, &r))
3691 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3693 if (LEGITIMATE_CONSTANT_P (trunc_y))
3695 /* Skip if the target needs extra instructions to perform
3697 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3700 else if (float_extend_from_mem[dstmode][srcmode])
3701 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3705 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3706 last_insn = get_last_insn ();
3708 if (GET_CODE (x) == REG)
3709 set_unique_reg_note (last_insn, REG_EQUAL, y);
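/* Worked case (illustrative): moving the DFmode constant 1.5 can be
   emitted as an SFmode load followed by a float_extend, because 1.5
   truncates to SFmode exactly; a constant such as 1e-40 fails
   exact_real_truncate for SFmode and is left in DFmode.  */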
3717 /* Pushing data onto the stack. */
3719 /* Push a block of length SIZE (perhaps variable)
3720 and return an rtx to address the beginning of the block.
3721 Note that it is not possible for the value returned to be a QUEUED.
3722 The value may be virtual_outgoing_args_rtx.
3724 EXTRA is the number of bytes of padding to push in addition to SIZE.
3725 BELOW nonzero means this padding comes at low addresses;
3726 otherwise, the padding comes at high addresses. */
3729 push_block (size, extra, below)
3735 size = convert_modes (Pmode, ptr_mode, size, 1);
3736 if (CONSTANT_P (size))
3737 anti_adjust_stack (plus_constant (size, extra));
3738 else if (GET_CODE (size) == REG && extra == 0)
3739 anti_adjust_stack (size);
3742 temp = copy_to_mode_reg (Pmode, size);
3744 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3745 temp, 0, OPTAB_LIB_WIDEN);
3746 anti_adjust_stack (temp);
3749 #ifndef STACK_GROWS_DOWNWARD
3755 temp = virtual_outgoing_args_rtx;
3756 if (extra != 0 && below)
3757 temp = plus_constant (temp, extra);
3761 if (GET_CODE (size) == CONST_INT)
3762 temp = plus_constant (virtual_outgoing_args_rtx,
3763 -INTVAL (size) - (below ? 0 : extra));
3764 else if (extra != 0 && !below)
3765 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3766 negate_rtx (Pmode, plus_constant (size, extra)));
3768 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3769 negate_rtx (Pmode, size));
3772 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
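/* Worked example (STACK_GROWS_DOWNWARD assumed): push_block
   (GEN_INT (32), 8, 0) adjusts the stack by 40 bytes and returns
   virtual_outgoing_args_rtx - 40, so the 8 padding bytes sit above
   the 32-byte block, at the higher addresses.  */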
3775 #ifdef PUSH_ROUNDING
3777 /* Emit single push insn. */
3780 emit_single_push_insn (mode, x, type)
3782 enum machine_mode mode;
3786 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3788 enum insn_code icode;
3789 insn_operand_predicate_fn pred;
3791 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3792 /* If there is a push pattern, use it. Otherwise fall back to the old
3793 way: hand a MEM representing the push operation to the move expander. */
3794 icode = push_optab->handlers[(int) mode].insn_code;
3795 if (icode != CODE_FOR_nothing)
3797 if (((pred = insn_data[(int) icode].operand[0].predicate)
3798 && !((*pred) (x, mode))))
3799 x = force_reg (mode, x);
3800 emit_insn (GEN_FCN (icode) (x));
3803 if (GET_MODE_SIZE (mode) == rounded_size)
3804 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3807 #ifdef STACK_GROWS_DOWNWARD
3808 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3809 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3811 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3812 GEN_INT (rounded_size));
3814 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3817 dest = gen_rtx_MEM (mode, dest_addr);
3821 set_mem_attributes (dest, type, 1);
3823 if (flag_optimize_sibling_calls)
3824 /* Function incoming arguments may overlap with sibling call
3825 outgoing arguments and we cannot allow reordering of reads
3826 from function arguments with stores to outgoing arguments
3827 of sibling calls. */
3828 set_mem_alias_set (dest, 0);
3830 emit_move_insn (dest, x);
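/* Illustrative RTL (STACK_GROWS_DOWNWARD and no push pattern assumed):
   pushing a DFmode value whose size equals PUSH_ROUNDING's result
   yields

       (set (mem:DF (pre_dec (reg sp))) x)

   while a rounded size of, say, 12 instead yields

       (set (mem:DF (pre_modify (reg sp)
                                (plus (reg sp) (const_int -12)))) x).  */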
3834 /* Generate code to push X onto the stack, assuming it has mode MODE and
3836 MODE is redundant except when X is a CONST_INT (since they don't
3838 SIZE is an rtx for the size of data to be copied (in bytes),
3839 needed only if X is BLKmode.
3841 ALIGN (in bits) is maximum alignment we can assume.
3843 If PARTIAL and REG are both nonzero, then copy that many of the first
3844 words of X into registers starting with REG, and push the rest of X.
3845 The amount of space pushed is decreased by PARTIAL words,
3846 rounded *down* to a multiple of PARM_BOUNDARY.
3847 REG must be a hard register in this case.
3848 If REG is zero but PARTIAL is not, take all other actions for an
3849 argument partially in registers, but do not actually load any
3852 EXTRA is the amount in bytes of extra space to leave next to this arg.
3853 This is ignored if an argument block has already been allocated.
3855 On a machine that lacks real push insns, ARGS_ADDR is the address of
3856 the bottom of the argument block for this call. We use indexing off there
3857 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3858 argument block has not been preallocated.
3860 ARGS_SO_FAR is the size of args previously pushed for this call.
3862 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3863 for arguments passed in registers. If nonzero, it will be the number
3864 of bytes required. */
3867 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3868 args_addr, args_so_far, reg_parm_stack_space,
3871 enum machine_mode mode;
3880 int reg_parm_stack_space;
3884 enum direction stack_direction
3885 #ifdef STACK_GROWS_DOWNWARD
3891 /* Decide where to pad the argument: `downward' for below,
3892 `upward' for above, or `none' for don't pad it.
3893 Default is below for small data on big-endian machines; else above. */
3894 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3896 /* Invert direction if stack is post-decrement.
3898 if (STACK_PUSH_CODE == POST_DEC)
3899 if (where_pad != none)
3900 where_pad = (where_pad == downward ? upward : downward);
3902 xinner = x = protect_from_queue (x, 0);
3904 if (mode == BLKmode)
3906 /* Copy a block into the stack, entirely or partially. */
3909 int used = partial * UNITS_PER_WORD;
3910 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3918 /* USED is now the # of bytes we need not copy to the stack
3919 because registers will take care of them. */
3922 xinner = adjust_address (xinner, BLKmode, used);
3924 /* If the partial register-part of the arg counts in its stack size,
3925 skip the part of stack space corresponding to the registers.
3926 Otherwise, start copying to the beginning of the stack space,
3927 by setting SKIP to 0. */
3928 skip = (reg_parm_stack_space == 0) ? 0 : used;
3930 #ifdef PUSH_ROUNDING
3931 /* Do it with several push insns if that doesn't take lots of insns
3932 and if there is no difficulty with push insns that skip bytes
3933 on the stack for alignment purposes. */
3936 && GET_CODE (size) == CONST_INT
3938 && MEM_ALIGN (xinner) >= align
3939 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3940 /* Here we avoid the case of a structure whose weak alignment
3941 forces many pushes of a small amount of data,
3942 and such small pushes do rounding that causes trouble. */
3943 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3944 || align >= BIGGEST_ALIGNMENT
3945 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3946 == (align / BITS_PER_UNIT)))
3947 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3949 /* Push padding now if padding above and stack grows down,
3950 or if padding below and stack grows up.
3951 But if space already allocated, this has already been done. */
3952 if (extra && args_addr == 0
3953 && where_pad != none && where_pad != stack_direction)
3954 anti_adjust_stack (GEN_INT (extra));
3956 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3959 #endif /* PUSH_ROUNDING */
3963 /* Otherwise make space on the stack and copy the data
3964 to the address of that space. */
3966 /* Deduct words put into registers from the size we must copy. */
3969 if (GET_CODE (size) == CONST_INT)
3970 size = GEN_INT (INTVAL (size) - used);
3972 size = expand_binop (GET_MODE (size), sub_optab, size,
3973 GEN_INT (used), NULL_RTX, 0,
3977 /* Get the address of the stack space.
3978 In this case, we do not deal with EXTRA separately.
3979 A single stack adjust will do. */
3982 temp = push_block (size, extra, where_pad == downward);
3985 else if (GET_CODE (args_so_far) == CONST_INT)
3986 temp = memory_address (BLKmode,
3987 plus_constant (args_addr,
3988 skip + INTVAL (args_so_far)));
3990 temp = memory_address (BLKmode,
3991 plus_constant (gen_rtx_PLUS (Pmode,
3996 if (!ACCUMULATE_OUTGOING_ARGS)
3998 /* If the source is referenced relative to the stack pointer,
3999 copy it to another register to stabilize it. We do not need
4000 to do this if we know that we won't be changing sp. */
4002 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
4003 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
4004 temp = copy_to_reg (temp);
4007 target = gen_rtx_MEM (BLKmode, temp);
4011 set_mem_attributes (target, type, 1);
4012 /* Function incoming arguments may overlap with sibling call
4013 outgoing arguments and we cannot allow reordering of reads
4014 from function arguments with stores to outgoing arguments
4015 of sibling calls. */
4016 set_mem_alias_set (target, 0);
4019 /* ALIGN may well be better aligned than TYPE, e.g. due to
4020 PARM_BOUNDARY. Assume the caller isn't lying. */
4021 set_mem_align (target, align);
4023 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
4026 else if (partial > 0)
4028 /* Scalar partly in registers. */
4030 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
4033 /* # words of start of argument
4034 that we must make space for but need not store. */
4035 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
4036 int args_offset = INTVAL (args_so_far);
4039 /* Push padding now if padding above and stack grows down,
4040 or if padding below and stack grows up.
4041 But if space already allocated, this has already been done. */
4042 if (extra && args_addr == 0
4043 && where_pad != none && where_pad != stack_direction)
4044 anti_adjust_stack (GEN_INT (extra));
4046 /* If we make space by pushing it, we might as well push
4047 the real data. Otherwise, we can leave OFFSET nonzero
4048 and leave the space uninitialized. */
4052 /* Now NOT_STACK gets the number of words that we don't need to
4053 allocate on the stack. */
4054 not_stack = partial - offset;
4056 /* If the partial register-part of the arg counts in its stack size,
4057 skip the part of stack space corresponding to the registers.
4058 Otherwise, start copying to the beginning of the stack space,
4059 by setting SKIP to 0. */
4060 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
4062 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
4063 x = validize_mem (force_const_mem (mode, x));
4065 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
4066 SUBREGs of such registers are not allowed. */
4067 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
4068 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
4069 x = copy_to_reg (x);
4071 /* Loop over all the words allocated on the stack for this arg. */
4072 /* We can do it by words, because any scalar bigger than a word
4073 has a size that is a multiple of a word. */
4074 #ifndef PUSH_ARGS_REVERSED
4075 for (i = not_stack; i < size; i++)
4077 for (i = size - 1; i >= not_stack; i--)
4079 if (i >= not_stack + offset)
4080 emit_push_insn (operand_subword_force (x, i, mode),
4081 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
4083 GEN_INT (args_offset + ((i - not_stack + skip)
4085 reg_parm_stack_space, alignment_pad);
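      /* Worked example (illustrative, BITS_PER_WORD == 32,
	 PARM_BOUNDARY == 64): for a 24-byte scalar with PARTIAL == 4,
	 SIZE is 6 words, OFFSET is 4 % 2 == 0 and NOT_STACK == 4, so
	 only words 4 and 5 are pushed here; the first four words are
	 copied into registers at the end of emit_push_insn.  */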
4092 /* Push padding now if padding above and stack grows down,
4093 or if padding below and stack grows up.
4094 But if space already allocated, this has already been done. */
4095 if (extra && args_addr == 0
4096 && where_pad != none && where_pad != stack_direction)
4097 anti_adjust_stack (GEN_INT (extra));
4099 #ifdef PUSH_ROUNDING
4100 if (args_addr == 0 && PUSH_ARGS)
4101 emit_single_push_insn (mode, x, type);
4105 if (GET_CODE (args_so_far) == CONST_INT)
4107 = memory_address (mode,
4108 plus_constant (args_addr,
4109 INTVAL (args_so_far)));
4111 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
4113 dest = gen_rtx_MEM (mode, addr);
4116 set_mem_attributes (dest, type, 1);
4117 /* Function incoming arguments may overlap with sibling call
4118 outgoing arguments and we cannot allow reordering of reads
4119 from function arguments with stores to outgoing arguments
4120 of sibling calls. */
4121 set_mem_alias_set (dest, 0);
4124 emit_move_insn (dest, x);
4128 /* If part should go in registers, copy that part
4129 into the appropriate registers. Do this now, at the end,
4130 since mem-to-mem copies above may do function calls. */
4131 if (partial > 0 && reg != 0)
4133 /* Handle calls that pass values in multiple non-contiguous locations.
4134 The Irix 6 ABI has examples of this. */
4135 if (GET_CODE (reg) == PARALLEL)
4136 emit_group_load (reg, x, -1); /* ??? size? */
4138 move_block_to_reg (REGNO (reg), x, partial, mode);
4141 if (extra && args_addr == 0 && where_pad == stack_direction)
4142 anti_adjust_stack (GEN_INT (extra));
4144 if (alignment_pad && args_addr == 0)
4145 anti_adjust_stack (alignment_pad);
4148 /* Return X if X can be used as a subtarget in a sequence of arithmetic
4156 /* Only registers can be subtargets. */
4157 || GET_CODE (x) != REG
4158 /* If the register is readonly, it can't be set more than once. */
4159 || RTX_UNCHANGING_P (x)
4160 /* Don't use hard regs to avoid extending their life. */
4161 || REGNO (x) < FIRST_PSEUDO_REGISTER
4162 /* Avoid subtargets inside loops,
4163 since they hide some invariant expressions. */
4164 || preserve_subexpressions_p ())
4168 /* Expand an assignment that stores the value of FROM into TO.
4169 If WANT_VALUE is nonzero, return an rtx for the value of TO.
4170 (This may contain a QUEUED rtx;
4171 if the value is constant, this rtx is a constant.)
4172 Otherwise, the returned value is NULL_RTX.
4174 SUGGEST_REG is no longer actually used.
4175 It used to mean, copy the value through a register
4176 and return that register, if that is possible.
4177 We now use WANT_VALUE to decide whether to do this. */
4180 expand_assignment (to, from, want_value, suggest_reg)
4183 int suggest_reg ATTRIBUTE_UNUSED;
4188 /* Don't crash if the lhs of the assignment was erroneous. */
4190 if (TREE_CODE (to) == ERROR_MARK)
4192 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
4193 return want_value ? result : NULL_RTX;
4196 /* Assignment of a structure component needs special treatment
4197 if the structure component's rtx is not simply a MEM.
4198 Assignment of an array element at a constant index, and assignment of
4199 an array element in an unaligned packed structure field, has the same
4202 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
4203 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
4204 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4206 enum machine_mode mode1;
4207 HOST_WIDE_INT bitsize, bitpos;
4215 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4216 &unsignedp, &volatilep);
4218 /* If we are going to use store_bit_field and extract_bit_field,
4219 make sure to_rtx will be safe for multiple use. */
4221 if (mode1 == VOIDmode && want_value)
4222 tem = stabilize_reference (tem);
4224 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
4228 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4230 if (GET_CODE (to_rtx) != MEM)
4233 #ifdef POINTERS_EXTEND_UNSIGNED
4234 if (GET_MODE (offset_rtx) != Pmode)
4235 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4237 if (GET_MODE (offset_rtx) != ptr_mode)
4238 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4241 /* A constant address in TO_RTX can have VOIDmode; we must not try
4242 to call force_reg for that case. Avoid that case. */
4243 if (GET_CODE (to_rtx) == MEM
4244 && GET_MODE (to_rtx) == BLKmode
4245 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4247 && (bitpos % bitsize) == 0
4248 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4249 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4251 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4255 to_rtx = offset_address (to_rtx, offset_rtx,
4256 highest_pow2_factor_for_type (TREE_TYPE (to),
4260 if (GET_CODE (to_rtx) == MEM)
4262 /* If the field is at offset zero, we could have been given the
4263 DECL_RTX of the parent struct. Don't munge it. */
4264 to_rtx = shallow_copy_rtx (to_rtx);
4266 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4269 /* Deal with volatile and readonly fields. The former is only done
4270 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4271 if (volatilep && GET_CODE (to_rtx) == MEM)
4273 if (to_rtx == orig_to_rtx)
4274 to_rtx = copy_rtx (to_rtx);
4275 MEM_VOLATILE_P (to_rtx) = 1;
4278 if (TREE_CODE (to) == COMPONENT_REF
4279 && TREE_READONLY (TREE_OPERAND (to, 1)))
4281 if (to_rtx == orig_to_rtx)
4282 to_rtx = copy_rtx (to_rtx);
4283 RTX_UNCHANGING_P (to_rtx) = 1;
4286 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
4288 if (to_rtx == orig_to_rtx)
4289 to_rtx = copy_rtx (to_rtx);
4290 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4293 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4295 /* Spurious cast for HPUX compiler. */
4296 ? ((enum machine_mode)
4297 TYPE_MODE (TREE_TYPE (to)))
4299 unsignedp, TREE_TYPE (tem), get_alias_set (to));
4301 preserve_temp_slots (result);
4305 /* If the value is meaningful, convert RESULT to the proper mode.
4306 Otherwise, return nothing. */
4307 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
4308 TYPE_MODE (TREE_TYPE (from)),
4310 TREE_UNSIGNED (TREE_TYPE (to)))
4314 /* If the rhs is a function call and its value is not an aggregate,
4315 call the function before we start to compute the lhs.
4316 This is needed for correct code for cases such as
4317 val = setjmp (buf) on machines where reference to val
4318 requires loading up part of an address in a separate insn.
4320 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4321 since it might be a promoted variable where the zero- or sign- extension
4322 needs to be done. Handling this in the normal way is safe because no
4323 computation is done before the call. */
4324 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
4325 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4326 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4327 && GET_CODE (DECL_RTL (to)) == REG))
4332 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
4334 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4336 /* Handle calls that return values in multiple non-contiguous locations.
4337 The Irix 6 ABI has examples of this. */
4338 if (GET_CODE (to_rtx) == PARALLEL)
4339 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)));
4340 else if (GET_MODE (to_rtx) == BLKmode)
4341 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4344 #ifdef POINTERS_EXTEND_UNSIGNED
4345 if (POINTER_TYPE_P (TREE_TYPE (to))
4346 && GET_MODE (to_rtx) != GET_MODE (value))
4347 value = convert_memory_address (GET_MODE (to_rtx), value);
4349 emit_move_insn (to_rtx, value);
4351 preserve_temp_slots (to_rtx);
4354 return want_value ? to_rtx : NULL_RTX;
4357 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4358 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4361 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4363 /* Don't move directly into a return register. */
4364 if (TREE_CODE (to) == RESULT_DECL
4365 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
4370 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
4372 if (GET_CODE (to_rtx) == PARALLEL)
4373 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)));
4375 emit_move_insn (to_rtx, temp);
4377 preserve_temp_slots (to_rtx);
4380 return want_value ? to_rtx : NULL_RTX;
4383 /* In case we are returning the contents of an object which overlaps
4384 the place the value is being stored, use a safe function when copying
4385 a value through a pointer into a structure value return block. */
4386 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4387 && current_function_returns_struct
4388 && !current_function_returns_pcc_struct)
4393 size = expr_size (from);
4394 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
4396 if (TARGET_MEM_FUNCTIONS)
4397 emit_library_call (memmove_libfunc, LCT_NORMAL,
4398 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4399 XEXP (from_rtx, 0), Pmode,
4400 convert_to_mode (TYPE_MODE (sizetype),
4401 size, TREE_UNSIGNED (sizetype)),
4402 TYPE_MODE (sizetype));
4404 emit_library_call (bcopy_libfunc, LCT_NORMAL,
4405 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
4406 XEXP (to_rtx, 0), Pmode,
4407 convert_to_mode (TYPE_MODE (integer_type_node),
4409 TREE_UNSIGNED (integer_type_node)),
4410 TYPE_MODE (integer_type_node));
4412 preserve_temp_slots (to_rtx);
4415 return want_value ? to_rtx : NULL_RTX;
4418 /* Compute FROM and store the value in the rtx we got. */
4421 result = store_expr (from, to_rtx, want_value);
4422 preserve_temp_slots (result);
4425 return want_value ? result : NULL_RTX;
4428 /* Generate code for computing expression EXP,
4429 and storing the value into TARGET.
4430 TARGET may contain a QUEUED rtx.
4432 If WANT_VALUE & 1 is nonzero, return a copy of the value
4433 not in TARGET, so that we can be sure to use the proper
4434 value in a containing expression even if TARGET has something
4435 else stored in it. If possible, we copy the value through a pseudo
4436 and return that pseudo. Or, if the value is constant, we try to
4437 return the constant. In some cases, we return a pseudo
4438 copied *from* TARGET.
4440 If the mode is BLKmode then we may return TARGET itself.
4441 It turns out that in BLKmode it doesn't cause a problem,
4442 because C has no operators that could combine two different
4443 assignments into the same BLKmode object with different values
4444 with no sequence point. Will other languages need this to be more thorough?
4447 If WANT_VALUE & 1 is 0, we return NULL, to make sure
4448 to catch quickly any cases where the caller uses the value
4449 and fails to set WANT_VALUE.
4451 If WANT_VALUE & 2 is set, this is a store into a call param on the
4452 stack, and block moves may need to be treated specially. */
4455 store_expr (exp, target, want_value)
4461 int dont_return_target = 0;
4462 int dont_store_target = 0;
4464 if (VOID_TYPE_P (TREE_TYPE (exp)))
4466 /* C++ can generate ?: expressions with a throw expression in one
4467 branch and an rvalue in the other. Here, we resolve attempts to
4468 store the throw expression's nonexistent result. */
4471 expand_expr (exp, const0_rtx, VOIDmode, 0);
4474 if (TREE_CODE (exp) == COMPOUND_EXPR)
4476 /* Perform first part of compound expression, then assign from second
4478 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4479 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4481 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4483 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4485 /* For conditional expression, get safe form of the target. Then
4486 test the condition, doing the appropriate assignment on either
4487 side. This avoids the creation of unnecessary temporaries.
4488 For non-BLKmode, it is more efficient not to do this. */
4490 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4493 target = protect_from_queue (target, 1);
4495 do_pending_stack_adjust ();
4497 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4498 start_cleanup_deferral ();
4499 store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
4500 end_cleanup_deferral ();
4502 emit_jump_insn (gen_jump (lab2));
4505 start_cleanup_deferral ();
4506 store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
4507 end_cleanup_deferral ();
4512 return want_value & 1 ? target : NULL_RTX;
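/* Illustrative sketch, not part of this file: the control flow emitted
   for a BLKmode COND_EXPR store above.  Rather than expanding both arms
   into temporaries, the condition is tested and each arm stores
   directly into TARGET.  Hypothetical helper, plain C:  */
static void
cond_store (int cond, int *target, int a, int b)
{
  if (!cond)
    goto lab1;        /* jumpifnot (TREE_OPERAND (exp, 0), lab1) */
  *target = a;        /* store_expr (TREE_OPERAND (exp, 1), target, ...) */
  goto lab2;          /* emit_jump_insn (gen_jump (lab2)) */
 lab1:
  *target = b;        /* store_expr (TREE_OPERAND (exp, 2), target, ...) */
 lab2:
  ;
}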
4514 else if (queued_subexp_p (target))
4515 /* If target contains a postincrement, let's not risk
4516 using it as the place to generate the rhs. */
4518 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4520 /* Expand EXP into a new pseudo. */
4521 temp = gen_reg_rtx (GET_MODE (target));
4522 temp = expand_expr (exp, temp, GET_MODE (target),
4524 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4527 temp = expand_expr (exp, NULL_RTX, GET_MODE (target),
4529 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4531 /* If target is volatile, ANSI requires accessing the value
4532 *from* the target, if it is accessed. So make that happen.
4533 In no case return the target itself. */
4534 if (! MEM_VOLATILE_P (target) && (want_value & 1) != 0)
4535 dont_return_target = 1;
4537 else if ((want_value & 1) != 0
4538 && GET_CODE (target) == MEM
4539 && ! MEM_VOLATILE_P (target)
4540 && GET_MODE (target) != BLKmode)
4541 /* If target is in memory and caller wants value in a register instead,
4542 arrange that. Pass TARGET as target for expand_expr so that,
4543 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4544 We know expand_expr will not use the target in that case.
4545 Don't do this if TARGET is volatile because we are supposed
4546 to write it and then read it. */
4548 temp = expand_expr (exp, target, GET_MODE (target),
4549 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4550 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4552 /* If TEMP is already in the desired TARGET, only copy it from
4553 memory and don't store it there again. */
4555 || (rtx_equal_p (temp, target)
4556 && ! side_effects_p (temp) && ! side_effects_p (target)))
4557 dont_store_target = 1;
4558 temp = copy_to_reg (temp);
4560 dont_return_target = 1;
4562 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4563 /* If this is a scalar in a register that is stored in a wider mode
4564 than the declared mode, compute the result into its declared mode
4565 and then convert to the wider mode. Our value is the computed
4568 rtx inner_target = 0;
4570 /* If we don't want a value, we can do the conversion inside EXP,
4571 which will often result in some optimizations. Do the conversion
4572 in two steps: first change the signedness, if needed, then
4573 the extend. But don't do this if the type of EXP is a subtype
4574 of something else since then the conversion might involve
4575 more than just converting modes. */
4576 if ((want_value & 1) == 0
4577 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4578 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4580 if (TREE_UNSIGNED (TREE_TYPE (exp))
4581 != SUBREG_PROMOTED_UNSIGNED_P (target))
4583 ((*lang_hooks.types.signed_or_unsigned_type)
4584 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4586 exp = convert ((*lang_hooks.types.type_for_mode)
4587 (GET_MODE (SUBREG_REG (target)),
4588 SUBREG_PROMOTED_UNSIGNED_P (target)),
4591 inner_target = SUBREG_REG (target);
4594 temp = expand_expr (exp, inner_target, VOIDmode,
4595 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4597 /* If TEMP is a MEM and we want a result value, make the access
4598 now so it gets done only once. Strictly speaking, this is
4599 only necessary if the MEM is volatile, or if the address
4600 overlaps TARGET. But not performing the load twice also
4601 reduces the amount of rtl we generate and then have to CSE. */
4602 if (GET_CODE (temp) == MEM && (want_value & 1) != 0)
4603 temp = copy_to_reg (temp);
4605 /* If TEMP is a VOIDmode constant, use convert_modes to make
4606 sure that we properly convert it. */
4607 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4609 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4610 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4611 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4612 GET_MODE (target), temp,
4613 SUBREG_PROMOTED_UNSIGNED_P (target));
4616 convert_move (SUBREG_REG (target), temp,
4617 SUBREG_PROMOTED_UNSIGNED_P (target));
4619 /* If we promoted a constant, change the mode back down to match
4620 target. Otherwise, the caller might get confused by a result whose
4621 mode is larger than expected. */
4623 if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
4625 if (GET_MODE (temp) != VOIDmode)
4627 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4628 SUBREG_PROMOTED_VAR_P (temp) = 1;
4629 SUBREG_PROMOTED_UNSIGNED_SET (temp,
4630 SUBREG_PROMOTED_UNSIGNED_P (target));
4633 temp = convert_modes (GET_MODE (target),
4634 GET_MODE (SUBREG_REG (target)),
4635 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4638 return want_value & 1 ? temp : NULL_RTX;
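/* Illustrative sketch, not part of this file: what storing to a
   promoted SUBREG amounts to.  A narrow variable kept in a wider
   register must stay zero- or sign-extended, so the value is computed
   in the declared type and then widened with the right signedness.
   Hypothetical helper, using short promoted to int:  */
static int
store_promoted_short (short value, int unsignedp)
{
  /* convert_move (SUBREG_REG (target), temp, unsignedp) is in effect
     this explicit widening.  */
  return unsignedp ? (int) (unsigned short) value : (int) value;
}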
4642 temp = expand_expr (exp, target, GET_MODE (target),
4643 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4644 /* Return TARGET if it's a specified hardware register.
4645 If TARGET is a volatile mem ref, either return TARGET
4646 or return a reg copied *from* TARGET; ANSI requires this.
4648 Otherwise, if TEMP is not TARGET, return TEMP
4649 if it is constant (for efficiency),
4650 or if we really want the correct value. */
4651 if (!(target && GET_CODE (target) == REG
4652 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4653 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4654 && ! rtx_equal_p (temp, target)
4655 && (CONSTANT_P (temp) || (want_value & 1) != 0))
4656 dont_return_target = 1;
4659 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4660 the same as that of TARGET, adjust the constant. This is needed, for
4661 example, in case it is a CONST_DOUBLE and we want only a word-sized
4663 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4664 && TREE_CODE (exp) != ERROR_MARK
4665 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4666 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4667 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4669 /* If value was not generated in the target, store it there.
4670 Convert the value to TARGET's type first if necessary.
4671 If TEMP and TARGET compare equal according to rtx_equal_p, but
4672 one or both of them are volatile memory refs, we have to distinguish
4674 - expand_expr has used TARGET. In this case, we must not generate
4675 another copy. This can be detected by TARGET being equal according
4677 - expand_expr has not used TARGET - that means that the source just
4678 happens to have the same RTX form. Since temp will have been created
4679 by expand_expr, it will compare unequal according to == .
4680 We must generate a copy in this case, to reach the correct number
4681 of volatile memory references. */
4683 if ((! rtx_equal_p (temp, target)
4684 || (temp != target && (side_effects_p (temp)
4685 || side_effects_p (target))))
4686 && TREE_CODE (exp) != ERROR_MARK
4687 && ! dont_store_target
4688 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4689 but TARGET is not valid memory reference, TEMP will differ
4690 from TARGET although it is really the same location. */
4691 && (TREE_CODE_CLASS (TREE_CODE (exp)) != 'd'
4692 || target != DECL_RTL_IF_SET (exp))
4693 /* If there's nothing to copy, don't bother. Don't call expr_size
4694 unless necessary, because some front ends (C++, e.g.) have an expr_size
4695 hook that aborts on objects not meant to be bit-copied or copied by reference. */
4697 && expr_size (exp) != const0_rtx)
4699 target = protect_from_queue (target, 1);
4700 if (GET_MODE (temp) != GET_MODE (target)
4701 && GET_MODE (temp) != VOIDmode)
4703 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4704 if (dont_return_target)
4706 /* In this case, we will return TEMP,
4707 so make sure it has the proper mode.
4708 But don't forget to store the value into TARGET. */
4709 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4710 emit_move_insn (target, temp);
4713 convert_move (target, temp, unsignedp);
4716 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4718 /* Handle copying a string constant into an array. The string
4719 constant may be shorter than the array. So copy just the string's
4720 actual length, and clear the rest. First get the size of the data
4721 type of the string, which is actually the size of the target. */
4722 rtx size = expr_size (exp);
4724 if (GET_CODE (size) == CONST_INT
4725 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4726 emit_block_move (target, temp, size,
4728 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4731 /* Compute the size of the data to copy from the string. */
4733 = size_binop (MIN_EXPR,
4734 make_tree (sizetype, size),
4735 size_int (TREE_STRING_LENGTH (exp)));
4737 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4739 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4742 /* Copy that much. */
4743 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4744 TREE_UNSIGNED (sizetype));
4745 emit_block_move (target, temp, copy_size_rtx,
4747 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4749 /* Figure out how much is left in TARGET that we have to clear.
4750 Do all calculations in ptr_mode. */
4751 if (GET_CODE (copy_size_rtx) == CONST_INT)
4753 size = plus_constant (size, -INTVAL (copy_size_rtx));
4754 target = adjust_address (target, BLKmode,
4755 INTVAL (copy_size_rtx));
4759 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4760 copy_size_rtx, NULL_RTX, 0,
4763 #ifdef POINTERS_EXTEND_UNSIGNED
4764 if (GET_MODE (copy_size_rtx) != Pmode)
4765 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4766 TREE_UNSIGNED (sizetype));
4769 target = offset_address (target, copy_size_rtx,
4770 highest_pow2_factor (copy_size));
4771 label = gen_label_rtx ();
4772 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4773 GET_MODE (size), 0, label);
4776 if (size != const0_rtx)
4777 clear_storage (target, size);
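/* Illustrative sketch, not part of this file: the copy-then-clear
   sequence above for a string constant shorter than its array.
   Hypothetical helper; assumes only <string.h>.  */
#include <string.h>

static void
init_from_string (char *target, size_t target_size,
                  const char *str, size_t str_len)
{
  /* copy_size = MIN_EXPR (size, TREE_STRING_LENGTH (exp))  */
  size_t copy_size = str_len < target_size ? str_len : target_size;

  memcpy (target, str, copy_size);      /* emit_block_move  */
  if (target_size > copy_size)          /* size != const0_rtx  */
    memset (target + copy_size, 0,      /* clear_storage  */
            target_size - copy_size);
}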
4783 /* Handle calls that return values in multiple non-contiguous locations.
4784 The Irix 6 ABI has examples of this. */
4785 else if (GET_CODE (target) == PARALLEL)
4786 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)));
4787 else if (GET_MODE (temp) == BLKmode)
4788 emit_block_move (target, temp, expr_size (exp),
4790 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4792 emit_move_insn (target, temp);
4795 /* If we don't want a value, return NULL_RTX. */
4796 if ((want_value & 1) == 0)
4799 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4800 ??? The latter test doesn't seem to make sense. */
4801 else if (dont_return_target && GET_CODE (temp) != MEM)
4804 /* Return TARGET itself if it is a hard register. */
4805 else if ((want_value & 1) != 0
4806 && GET_MODE (target) != BLKmode
4807 && ! (GET_CODE (target) == REG
4808 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4809 return copy_to_reg (target);
4815 /* Return 1 if EXP just contains zeros. */
4823 switch (TREE_CODE (exp))
4827 case NON_LVALUE_EXPR:
4828 case VIEW_CONVERT_EXPR:
4829 return is_zeros_p (TREE_OPERAND (exp, 0));
4832 return integer_zerop (exp);
4836 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4839 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4842 for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4843 elt = TREE_CHAIN (elt))
4844 if (!is_zeros_p (TREE_VALUE (elt)))
4850 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4851 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4852 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4853 if (! is_zeros_p (TREE_VALUE (elt)))
4863 /* Return 1 if EXP contains mostly (3/4) zeros. */
4866 mostly_zeros_p (exp)
4869 if (TREE_CODE (exp) == CONSTRUCTOR)
4871 int elts = 0, zeros = 0;
4872 tree elt = CONSTRUCTOR_ELTS (exp);
4873 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4875 /* If there are no ranges of true bits, it is all zero. */
4876 return elt == NULL_TREE;
4878 for (; elt; elt = TREE_CHAIN (elt))
4880 /* We do not handle the case where the index is a RANGE_EXPR,
4881 so the statistic will be somewhat inaccurate.
4882 We do make a more accurate count in store_constructor itself,
4883 and since this function is only used for nested array elements,
4884 this should be close enough. */
4885 if (mostly_zeros_p (TREE_VALUE (elt)))
4890 return 4 * zeros >= 3 * elts;
4893 return is_zeros_p (exp);
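/* Illustrative sketch, not part of this file: the 3/4 test above.
   "4 * zeros >= 3 * elts" asks whether at least 75% of the elements
   are zero without using division or floating point.  Hypothetical
   helper over a plain int array:  */
static int
mostly_zeros (const int *vals, int n)
{
  int zeros = 0, i;

  for (i = 0; i < n; i++)
    if (vals[i] == 0)
      zeros++;

  /* Equivalent to zeros >= 0.75 * n.  */
  return 4 * zeros >= 3 * n;
}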
4896 /* Helper function for store_constructor.
4897 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4898 TYPE is the type of the CONSTRUCTOR, not the element type.
4899 CLEARED is as for store_constructor.
4900 ALIAS_SET is the alias set to use for any stores.
4902 This provides a recursive shortcut back to store_constructor when it isn't
4903 necessary to go through store_field. This is so that we can pass through
4904 the cleared field to let store_constructor know that we may not have to
4905 clear a substructure if the outer structure has already been cleared. */
4908 store_constructor_field (target, bitsize, bitpos, mode, exp, type, cleared,
4911 unsigned HOST_WIDE_INT bitsize;
4912 HOST_WIDE_INT bitpos;
4913 enum machine_mode mode;
4918 if (TREE_CODE (exp) == CONSTRUCTOR
4919 && bitpos % BITS_PER_UNIT == 0
4920 /* If we have a nonzero bitpos for a register target, then we just
4921 let store_field do the bitfield handling. This is unlikely to
4922 generate unnecessary clear instructions anyway. */
4923 && (bitpos == 0 || GET_CODE (target) == MEM))
4925 if (GET_CODE (target) == MEM)
4927 = adjust_address (target,
4928 GET_MODE (target) == BLKmode
4930 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4931 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4934 /* Update the alias set, if required. */
4935 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4936 && MEM_ALIAS_SET (target) != 0)
4938 target = copy_rtx (target);
4939 set_mem_alias_set (target, alias_set);
4942 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4945 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4949 /* Store the value of constructor EXP into the rtx TARGET.
4950 TARGET is either a REG or a MEM; we know it cannot conflict, since
4951 safe_from_p has been called.
4952 CLEARED is true if TARGET is known to have been zeroed.
4953 SIZE is the number of bytes of TARGET we are allowed to modify: this
4954 may not be the same as the size of EXP if we are assigning to a field
4955 which has been packed to exclude padding bits. */
4958 store_constructor (exp, target, cleared, size)
4964 tree type = TREE_TYPE (exp);
4965 #ifdef WORD_REGISTER_OPERATIONS
4966 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4969 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4970 || TREE_CODE (type) == QUAL_UNION_TYPE)
4974 /* We either clear the aggregate or indicate the value is dead. */
4975 if ((TREE_CODE (type) == UNION_TYPE
4976 || TREE_CODE (type) == QUAL_UNION_TYPE)
4978 && ! CONSTRUCTOR_ELTS (exp))
4979 /* If the constructor is empty, clear the union. */
4981 clear_storage (target, expr_size (exp));
4985 /* If we are building a static constructor into a register,
4986 set the initial value as zero so we can fold the value into
4987 a constant. But if more than one register is involved,
4988 this probably loses. */
4989 else if (! cleared && GET_CODE (target) == REG && TREE_STATIC (exp)
4990 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4992 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4996 /* If the constructor has fewer fields than the structure
4997 or if we are initializing the structure to mostly zeros,
4998 clear the whole structure first. Don't do this if TARGET is a
4999 register whose mode size isn't equal to SIZE since clear_storage
5000 can't handle this case. */
5001 else if (! cleared && size > 0
5002 && ((list_length (CONSTRUCTOR_ELTS (exp))
5003 != fields_length (type))
5004 || mostly_zeros_p (exp))
5005 && (GET_CODE (target) != REG
5006 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
5009 rtx xtarget = target;
5011 if (readonly_fields_p (type))
5013 xtarget = copy_rtx (xtarget);
5014 RTX_UNCHANGING_P (xtarget) = 1;
5017 clear_storage (xtarget, GEN_INT (size));
5022 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5024 /* Store each element of the constructor into
5025 the corresponding field of TARGET. */
5027 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
5029 tree field = TREE_PURPOSE (elt);
5030 tree value = TREE_VALUE (elt);
5031 enum machine_mode mode;
5032 HOST_WIDE_INT bitsize;
5033 HOST_WIDE_INT bitpos = 0;
5035 rtx to_rtx = target;
5037 /* Just ignore missing fields.
5038 We cleared the whole structure, above,
5039 if any fields are missing. */
5043 if (cleared && is_zeros_p (value))
5046 if (host_integerp (DECL_SIZE (field), 1))
5047 bitsize = tree_low_cst (DECL_SIZE (field), 1);
5051 mode = DECL_MODE (field);
5052 if (DECL_BIT_FIELD (field))
5055 offset = DECL_FIELD_OFFSET (field);
5056 if (host_integerp (offset, 0)
5057 && host_integerp (bit_position (field), 0))
5059 bitpos = int_bit_position (field);
5063 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
5069 if (CONTAINS_PLACEHOLDER_P (offset))
5070 offset = build (WITH_RECORD_EXPR, sizetype,
5071 offset, make_tree (TREE_TYPE (exp), target));
5073 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
5074 if (GET_CODE (to_rtx) != MEM)
5077 #ifdef POINTERS_EXTEND_UNSIGNED
5078 if (GET_MODE (offset_rtx) != Pmode)
5079 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
5081 if (GET_MODE (offset_rtx) != ptr_mode)
5082 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
5085 to_rtx = offset_address (to_rtx, offset_rtx,
5086 highest_pow2_factor (offset));
5089 if (TREE_READONLY (field))
5091 if (GET_CODE (to_rtx) == MEM)
5092 to_rtx = copy_rtx (to_rtx);
5094 RTX_UNCHANGING_P (to_rtx) = 1;
5097 #ifdef WORD_REGISTER_OPERATIONS
5098 /* If this initializes a field that is smaller than a word, at the
5099 start of a word, try to widen it to a full word.
5100 This special case allows us to output C++ member function
5101 initializations in a form that the optimizers can understand. */
5102 if (GET_CODE (target) == REG
5103 && bitsize < BITS_PER_WORD
5104 && bitpos % BITS_PER_WORD == 0
5105 && GET_MODE_CLASS (mode) == MODE_INT
5106 && TREE_CODE (value) == INTEGER_CST
5108 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5110 tree type = TREE_TYPE (value);
5112 if (TYPE_PRECISION (type) < BITS_PER_WORD)
5114 type = (*lang_hooks.types.type_for_size)
5115 (BITS_PER_WORD, TREE_UNSIGNED (type));
5116 value = convert (type, value);
5119 if (BYTES_BIG_ENDIAN)
5121 = fold (build (LSHIFT_EXPR, type, value,
5122 build_int_2 (BITS_PER_WORD - bitsize, 0)));
5123 bitsize = BITS_PER_WORD;
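/* Illustrative sketch, not part of this file: the LSHIFT_EXPR above.
   On a big-endian target, a BITSIZE-bit field at bit position 0 of a
   word lives in the word's high-order bits, so the constant is shifted
   up before the full-word store.  Hypothetical helper; assumes
   0 < bitsize < bits_per_word <= width of unsigned long:  */
static unsigned long
widen_for_big_endian (unsigned long value, unsigned int bitsize,
                      unsigned int bits_per_word)
{
  /* fold (build (LSHIFT_EXPR, type, value,
                  build_int_2 (BITS_PER_WORD - bitsize, 0)))  */
  return value << (bits_per_word - bitsize);
}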
5128 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5129 && DECL_NONADDRESSABLE_P (field))
5131 to_rtx = copy_rtx (to_rtx);
5132 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5135 store_constructor_field (to_rtx, bitsize, bitpos, mode,
5136 value, type, cleared,
5137 get_alias_set (TREE_TYPE (field)));
5140 else if (TREE_CODE (type) == ARRAY_TYPE
5141 || TREE_CODE (type) == VECTOR_TYPE)
5146 tree domain = TYPE_DOMAIN (type);
5147 tree elttype = TREE_TYPE (type);
5149 HOST_WIDE_INT minelt = 0;
5150 HOST_WIDE_INT maxelt = 0;
5152 /* Vectors are like arrays, but the domain is stored via an array
5154 if (TREE_CODE (type) == VECTOR_TYPE)
5156 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
5157 the same field as TYPE_DOMAIN, we are not guaranteed that
5159 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
5160 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
5163 const_bounds_p = (TYPE_MIN_VALUE (domain)
5164 && TYPE_MAX_VALUE (domain)
5165 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5166 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5168 /* If we have constant bounds for the range of the type, get them. */
5171 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5172 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5175 /* If the constructor has fewer elements than the array,
5176 clear the whole array first. Similarly if this is
5177 static constructor of a non-BLKmode object. */
5178 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
5182 HOST_WIDE_INT count = 0, zero_count = 0;
5183 need_to_clear = ! const_bounds_p;
5185 /* This loop is a more accurate version of the loop in
5186 mostly_zeros_p (it handles RANGE_EXPR in an index).
5187 It is also needed to check for missing elements. */
5188 for (elt = CONSTRUCTOR_ELTS (exp);
5189 elt != NULL_TREE && ! need_to_clear;
5190 elt = TREE_CHAIN (elt))
5192 tree index = TREE_PURPOSE (elt);
5193 HOST_WIDE_INT this_node_count;
5195 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5197 tree lo_index = TREE_OPERAND (index, 0);
5198 tree hi_index = TREE_OPERAND (index, 1);
5200 if (! host_integerp (lo_index, 1)
5201 || ! host_integerp (hi_index, 1))
5207 this_node_count = (tree_low_cst (hi_index, 1)
5208 - tree_low_cst (lo_index, 1) + 1);
5211 this_node_count = 1;
5213 count += this_node_count;
5214 if (mostly_zeros_p (TREE_VALUE (elt)))
5215 zero_count += this_node_count;
5218 /* Clear the entire array first if there are any missing elements,
5219 or if the incidence of zero elements is >= 75%. */
5221 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
5225 if (need_to_clear && size > 0)
5230 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5232 clear_storage (target, GEN_INT (size));
5236 else if (REG_P (target))
5237 /* Inform later passes that the old value is dead. */
5238 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5240 /* Store each element of the constructor into
5241 the corresponding element of TARGET, determined
5242 by counting the elements. */
5243 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
5245 elt = TREE_CHAIN (elt), i++)
5247 enum machine_mode mode;
5248 HOST_WIDE_INT bitsize;
5249 HOST_WIDE_INT bitpos;
5251 tree value = TREE_VALUE (elt);
5252 tree index = TREE_PURPOSE (elt);
5253 rtx xtarget = target;
5255 if (cleared && is_zeros_p (value))
5258 unsignedp = TREE_UNSIGNED (elttype);
5259 mode = TYPE_MODE (elttype);
5260 if (mode == BLKmode)
5261 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5262 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5265 bitsize = GET_MODE_BITSIZE (mode);
5267 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5269 tree lo_index = TREE_OPERAND (index, 0);
5270 tree hi_index = TREE_OPERAND (index, 1);
5271 rtx index_r, pos_rtx, loop_end;
5272 struct nesting *loop;
5273 HOST_WIDE_INT lo, hi, count;
5276 /* If the range is constant and "small", unroll the loop. */
5278 && host_integerp (lo_index, 0)
5279 && host_integerp (hi_index, 0)
5280 && (lo = tree_low_cst (lo_index, 0),
5281 hi = tree_low_cst (hi_index, 0),
5282 count = hi - lo + 1,
5283 (GET_CODE (target) != MEM
5285 || (host_integerp (TYPE_SIZE (elttype), 1)
5286 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5289 lo -= minelt; hi -= minelt;
5290 for (; lo <= hi; lo++)
5292 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5294 if (GET_CODE (target) == MEM
5295 && !MEM_KEEP_ALIAS_SET_P (target)
5296 && TREE_CODE (type) == ARRAY_TYPE
5297 && TYPE_NONALIASED_COMPONENT (type))
5299 target = copy_rtx (target);
5300 MEM_KEEP_ALIAS_SET_P (target) = 1;
5303 store_constructor_field
5304 (target, bitsize, bitpos, mode, value, type, cleared,
5305 get_alias_set (elttype));
5310 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
5311 loop_end = gen_label_rtx ();
5313 unsignedp = TREE_UNSIGNED (domain);
5315 index = build_decl (VAR_DECL, NULL_TREE, domain);
5318 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5320 SET_DECL_RTL (index, index_r);
5321 if (TREE_CODE (value) == SAVE_EXPR
5322 && SAVE_EXPR_RTL (value) == 0)
5324 /* Make sure value gets expanded once before the
5326 expand_expr (value, const0_rtx, VOIDmode, 0);
5329 store_expr (lo_index, index_r, 0);
5330 loop = expand_start_loop (0);
5332 /* Assign value to element index. */
5334 = convert (ssizetype,
5335 fold (build (MINUS_EXPR, TREE_TYPE (index),
5336 index, TYPE_MIN_VALUE (domain))));
5337 position = size_binop (MULT_EXPR, position,
5339 TYPE_SIZE_UNIT (elttype)));
5341 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
5342 xtarget = offset_address (target, pos_rtx,
5343 highest_pow2_factor (position));
5344 xtarget = adjust_address (xtarget, mode, 0);
5345 if (TREE_CODE (value) == CONSTRUCTOR)
5346 store_constructor (value, xtarget, cleared,
5347 bitsize / BITS_PER_UNIT);
5349 store_expr (value, xtarget, 0);
5351 expand_exit_loop_if_false (loop,
5352 build (LT_EXPR, integer_type_node,
5355 expand_increment (build (PREINCREMENT_EXPR,
5357 index, integer_one_node), 0, 0);
5359 emit_label (loop_end);
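/* Illustrative sketch, not part of this file: the loop built above for
   a RANGE_EXPR index that is not unrolled.  The emitted RTL behaves
   like this source-level loop (store, then exit unless index < hi
   after the pre-increment).  Hypothetical helper:  */
static void
store_range (int *target, int lo, int hi, int value)
{
  int index;

  for (index = lo; index <= hi; index++)  /* expand_start_loop ... */
    target[index] = value;                /* store_expr (value, xtarget, 0) */
}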
5362 else if ((index != 0 && ! host_integerp (index, 0))
5363 || ! host_integerp (TYPE_SIZE (elttype), 1))
5368 index = ssize_int (1);
5371 index = convert (ssizetype,
5372 fold (build (MINUS_EXPR, index,
5373 TYPE_MIN_VALUE (domain))));
5375 position = size_binop (MULT_EXPR, index,
5377 TYPE_SIZE_UNIT (elttype)));
5378 xtarget = offset_address (target,
5379 expand_expr (position, 0, VOIDmode, 0),
5380 highest_pow2_factor (position));
5381 xtarget = adjust_address (xtarget, mode, 0);
5382 store_expr (value, xtarget, 0);
5387 bitpos = ((tree_low_cst (index, 0) - minelt)
5388 * tree_low_cst (TYPE_SIZE (elttype), 1));
5390 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5392 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
5393 && TREE_CODE (type) == ARRAY_TYPE
5394 && TYPE_NONALIASED_COMPONENT (type))
5396 target = copy_rtx (target);
5397 MEM_KEEP_ALIAS_SET_P (target) = 1;
5400 store_constructor_field (target, bitsize, bitpos, mode, value,
5401 type, cleared, get_alias_set (elttype));
5407 /* Set constructor assignments. */
5408 else if (TREE_CODE (type) == SET_TYPE)
5410 tree elt = CONSTRUCTOR_ELTS (exp);
5411 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
5412 tree domain = TYPE_DOMAIN (type);
5413 tree domain_min, domain_max, bitlength;
5415 /* The default implementation strategy is to extract the constant
5416 parts of the constructor, use that to initialize the target,
5417 and then "or" in whatever non-constant ranges we need in addition.
5419 If a large set is all zero or all ones, it is
5420 probably better to set it using memset (if available) or bzero.
5421 Also, if a large set has just a single range, it may be
5422 better to first clear the whole set (using
5423 bzero/memset) and then set the bits we want. */
5425 /* Check for all zeros. */
5426 if (elt == NULL_TREE && size > 0)
5429 clear_storage (target, GEN_INT (size));
5433 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
5434 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
5435 bitlength = size_binop (PLUS_EXPR,
5436 size_diffop (domain_max, domain_min),
5439 nbits = tree_low_cst (bitlength, 1);
5441 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
5442 are "complicated" (more than one range), initialize (the
5443 constant parts) by copying from a constant. */
5444 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
5445 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
5447 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
5448 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
5449 char *bit_buffer = (char *) alloca (nbits);
5450 HOST_WIDE_INT word = 0;
5451 unsigned int bit_pos = 0;
5452 unsigned int ibit = 0;
5453 unsigned int offset = 0; /* In bytes from beginning of set. */
5455 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
5458 if (bit_buffer[ibit])
5460 if (BYTES_BIG_ENDIAN)
5461 word |= (1 << (set_word_size - 1 - bit_pos));
5463 word |= 1 << bit_pos;
5467 if (bit_pos >= set_word_size || ibit == nbits)
5469 if (word != 0 || ! cleared)
5471 rtx datum = GEN_INT (word);
5474 /* The assumption here is that it is safe to use
5475 XEXP if the set is multi-word, but not if
5476 it's single-word. */
5477 if (GET_CODE (target) == MEM)
5478 to_rtx = adjust_address (target, mode, offset);
5479 else if (offset == 0)
5483 emit_move_insn (to_rtx, datum);
5490 offset += set_word_size / BITS_PER_UNIT;
5495 /* Don't bother clearing storage if the set is all ones. */
5496 if (TREE_CHAIN (elt) != NULL_TREE
5497 || (TREE_PURPOSE (elt) == NULL_TREE
5499 : ( ! host_integerp (TREE_VALUE (elt), 0)
5500 || ! host_integerp (TREE_PURPOSE (elt), 0)
5501 || (tree_low_cst (TREE_VALUE (elt), 0)
5502 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5503 != (HOST_WIDE_INT) nbits))))
5504 clear_storage (target, expr_size (exp));
5506 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5508 /* Start of range of element or NULL. */
5509 tree startbit = TREE_PURPOSE (elt);
5510 /* End of range of element, or element value. */
5511 tree endbit = TREE_VALUE (elt);
5512 HOST_WIDE_INT startb, endb;
5513 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5515 bitlength_rtx = expand_expr (bitlength,
5516 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5518 /* Handle non-range tuple element like [ expr ]. */
5519 if (startbit == NULL_TREE)
5521 startbit = save_expr (endbit);
5525 startbit = convert (sizetype, startbit);
5526 endbit = convert (sizetype, endbit);
5527 if (! integer_zerop (domain_min))
5529 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5530 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5532 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5533 EXPAND_CONST_ADDRESS);
5534 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5535 EXPAND_CONST_ADDRESS);
5541 ((build_qualified_type ((*lang_hooks.types.type_for_mode)
5542 (GET_MODE (target), 0),
5545 emit_move_insn (targetx, target);
5548 else if (GET_CODE (target) == MEM)
5553 /* Optimization: If startbit and endbit are constants divisible
5554 by BITS_PER_UNIT, call memset instead. */
5555 if (TARGET_MEM_FUNCTIONS
5556 && TREE_CODE (startbit) == INTEGER_CST
5557 && TREE_CODE (endbit) == INTEGER_CST
5558 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5559 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5561 emit_library_call (memset_libfunc, LCT_NORMAL,
5563 plus_constant (XEXP (targetx, 0),
5564 startb / BITS_PER_UNIT),
5566 constm1_rtx, TYPE_MODE (integer_type_node),
5567 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5568 TYPE_MODE (sizetype));
5571 emit_library_call (setbits_libfunc, LCT_NORMAL,
5572 VOIDmode, 4, XEXP (targetx, 0),
5573 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5574 startbit_rtx, TYPE_MODE (sizetype),
5575 endbit_rtx, TYPE_MODE (sizetype));
5578 emit_move_insn (target, targetx);
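/* Illustrative sketch, not part of this file: the bit-packing done for
   SET_TYPE constructors above, using 8-bit units where the code uses
   set_word_size.  Constant members are OR'd into the buffer bit by
   bit, with the position mirrored on big-endian targets, and ranges
   covering whole bytes can be filled with memset instead.
   Hypothetical helpers; assumes only <string.h>:  */
#include <string.h>

static void
set_bit (unsigned char *set, unsigned int bit, int bytes_big_endian)
{
  unsigned char *byte = set + bit / 8;

  if (bytes_big_endian)
    *byte |= (unsigned char) (1u << (7 - bit % 8)); /* 1 << (size-1-pos) */
  else
    *byte |= (unsigned char) (1u << (bit % 8));     /* 1 << bit_pos */
}

static void
set_byte_aligned_range (unsigned char *set,
                        unsigned int startb, unsigned int endb)
{
  /* STARTB and ENDB are bit numbers divisible by 8 (BITS_PER_UNIT),
     as in the memset optimization above.  */
  memset (set + startb / 8, -1, (endb - startb) / 8);
}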
5586 /* Store the value of EXP (an expression tree)
5587 into a subfield of TARGET which has mode MODE and occupies
5588 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5589 If MODE is VOIDmode, it means that we are storing into a bit-field.
5591 If VALUE_MODE is VOIDmode, return nothing in particular.
5592 UNSIGNEDP is not used in this case.
5594 Otherwise, return an rtx for the value stored. This rtx
5595 has mode VALUE_MODE if that is convenient to do.
5596 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5598 TYPE is the type of the underlying object,
5600 ALIAS_SET is the alias set for the destination. This value will
5601 (in general) be different from that for TARGET, since TARGET is a
5602 reference to the containing structure. */
5605 store_field (target, bitsize, bitpos, mode, exp, value_mode, unsignedp, type,
5608 HOST_WIDE_INT bitsize;
5609 HOST_WIDE_INT bitpos;
5610 enum machine_mode mode;
5612 enum machine_mode value_mode;
5617 HOST_WIDE_INT width_mask = 0;
5619 if (TREE_CODE (exp) == ERROR_MARK)
5622 /* If we have nothing to store, do nothing unless the expression has side effects. */
5625 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5626 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5627 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5629 /* If we are storing into an unaligned field of an aligned union that is
5630 in a register, we may have the mode of TARGET being an integer mode but
5631 MODE == BLKmode. In that case, get an aligned object whose size and
5632 alignment are the same as TARGET and store TARGET into it (we can avoid
5633 the store if the field being stored is the entire width of TARGET). Then
5634 call ourselves recursively to store the field into a BLKmode version of
5635 that object. Finally, load from the object into TARGET. This is not
5636 very efficient in general, but should only be slightly more expensive
5637 than the otherwise-required unaligned accesses. Perhaps this can be
5638 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5639 twice, once with emit_move_insn and once via store_field. */
5642 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5644 rtx object = assign_temp (type, 0, 1, 1);
5645 rtx blk_object = adjust_address (object, BLKmode, 0);
5647 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5648 emit_move_insn (object, target);
5650 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5653 emit_move_insn (target, object);
5655 /* We want to return the BLKmode version of the data. */
5659 if (GET_CODE (target) == CONCAT)
5661 /* We're storing into a struct containing a single __complex. */
5665 return store_expr (exp, target, 0);
5668 /* If the structure is in a register or if the component
5669 is a bit field, we cannot use addressing to access it.
5670 Use bit-field techniques or SUBREG to store in it. */
5672 if (mode == VOIDmode
5673 || (mode != BLKmode && ! direct_store[(int) mode]
5674 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5675 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5676 || GET_CODE (target) == REG
5677 || GET_CODE (target) == SUBREG
5678 /* If the field isn't aligned enough to store as an ordinary memref,
5679 store it as a bit field. */
5681 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5682 || bitpos % GET_MODE_ALIGNMENT (mode))
5683 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5684 || (bitpos % BITS_PER_UNIT != 0)))
5685 /* If the RHS and field are a constant size and the size of the
5686 RHS isn't the same size as the bitfield, we must use bitfield
5689 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5690 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5692 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5694 /* If BITSIZE is narrower than the size of the type of EXP
5695 we will be narrowing TEMP. Normally, what's wanted are the
5696 low-order bits. However, if EXP's type is a record and this is
5697 a big-endian machine, we want the upper BITSIZE bits. */
5698 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5699 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5700 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5701 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5702 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5706 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5708 if (mode != VOIDmode && mode != BLKmode
5709 && mode != TYPE_MODE (TREE_TYPE (exp)))
5710 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5712 /* If the modes of TARGET and TEMP are both BLKmode, both
5713 must be in memory and BITPOS must be aligned on a byte
5714 boundary. If so, we simply do a block copy. */
5715 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5717 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5718 || bitpos % BITS_PER_UNIT != 0)
5721 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5722 emit_block_move (target, temp,
5723 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5727 return value_mode == VOIDmode ? const0_rtx : target;
5730 /* Store the value in the bitfield. */
5731 store_bit_field (target, bitsize, bitpos, mode, temp,
5732 int_size_in_bytes (type));
5734 if (value_mode != VOIDmode)
5736 /* The caller wants an rtx for the value.
5737 If possible, avoid refetching from the bitfield itself. */
5739 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5742 enum machine_mode tmode;
5744 tmode = GET_MODE (temp);
5745 if (tmode == VOIDmode)
5749 return expand_and (tmode, temp,
5750 gen_int_mode (width_mask, tmode),
5753 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5754 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5755 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
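/* Illustrative sketch, not part of this file: the two refetch paths
   above.  An unsigned BITSIZE-bit field is recovered by masking with
   width_mask = (1 << bitsize) - 1; a signed one by shifting the field
   to the top of the word and arithmetically shifting it back down.
   Hypothetical helper; assumes 0 < bitsize < the width of long and
   the usual two's-complement arithmetic right shift:  */
static long
refetch_field (long temp, int bitsize, int unsignedp)
{
  int count = (int) (sizeof (long) * 8) - bitsize;

  if (unsignedp)
    return temp & ((1L << bitsize) - 1);  /* expand_and with width_mask */

  return (temp << count) >> count;        /* LSHIFT_EXPR then RSHIFT_EXPR */
}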
5758 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5759 NULL_RTX, value_mode, VOIDmode,
5760 int_size_in_bytes (type));
5766 rtx addr = XEXP (target, 0);
5767 rtx to_rtx = target;
5769 /* If a value is wanted, it must be the lhs;
5770 so make the address stable for multiple use. */
5772 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5773 && ! CONSTANT_ADDRESS_P (addr)
5774 /* A frame-pointer reference is already stable. */
5775 && ! (GET_CODE (addr) == PLUS
5776 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5777 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5778 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5779 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5781 /* Now build a reference to just the desired component. */
5783 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5785 if (to_rtx == target)
5786 to_rtx = copy_rtx (to_rtx);
5788 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5789 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5790 set_mem_alias_set (to_rtx, alias_set);
5792 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5796 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5797 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5798 codes and find the ultimate containing object, which we return.
5800 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5801 bit position, and *PUNSIGNEDP to the signedness of the field.
5802 If the position of the field is variable, we store a tree
5803 giving the variable offset (in units) in *POFFSET.
5804 This offset is in addition to the bit position.
5805 If the position is not variable, we store 0 in *POFFSET.
5807 If any of the extraction expressions is volatile,
5808 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5810 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5811 is a mode that can be used to access the field. In that case, *PBITSIZE
5814 If the field describes a variable-sized object, *PMODE is set to
5815 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5816 this case, but the address of the object can be found. */
5819 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5820 punsignedp, pvolatilep)
5822 HOST_WIDE_INT *pbitsize;
5823 HOST_WIDE_INT *pbitpos;
5825 enum machine_mode *pmode;
5830 enum machine_mode mode = VOIDmode;
5831 tree offset = size_zero_node;
5832 tree bit_offset = bitsize_zero_node;
5833 tree placeholder_ptr = 0;
5836 /* First get the mode, signedness, and size. We do this from just the
5837 outermost expression. */
5838 if (TREE_CODE (exp) == COMPONENT_REF)
5840 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5841 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5842 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5844 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5846 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5848 size_tree = TREE_OPERAND (exp, 1);
5849 *punsignedp = TREE_UNSIGNED (exp);
5853 mode = TYPE_MODE (TREE_TYPE (exp));
5854 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5856 if (mode == BLKmode)
5857 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5859 *pbitsize = GET_MODE_BITSIZE (mode);
5864 if (! host_integerp (size_tree, 1))
5865 mode = BLKmode, *pbitsize = -1;
5867 *pbitsize = tree_low_cst (size_tree, 1);
5870 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5871 and find the ultimate containing object. */
5874 if (TREE_CODE (exp) == BIT_FIELD_REF)
5875 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5876 else if (TREE_CODE (exp) == COMPONENT_REF)
5878 tree field = TREE_OPERAND (exp, 1);
5879 tree this_offset = DECL_FIELD_OFFSET (field);
5881 /* If this field hasn't been filled in yet, don't go
5882 past it. This should only happen when folding expressions
5883 made during type construction. */
5884 if (this_offset == 0)
5886 else if (CONTAINS_PLACEHOLDER_P (this_offset))
5887 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5889 offset = size_binop (PLUS_EXPR, offset, this_offset);
5890 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5891 DECL_FIELD_BIT_OFFSET (field));
5893 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5896 else if (TREE_CODE (exp) == ARRAY_REF
5897 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5899 tree index = TREE_OPERAND (exp, 1);
5900 tree array = TREE_OPERAND (exp, 0);
5901 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5902 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5903 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5905 /* We assume all arrays have sizes that are a multiple of a byte.
5906 First subtract the lower bound, if any, in the type of the
5907 index, then convert to sizetype and multiply by the size of the
5909 if (low_bound != 0 && ! integer_zerop (low_bound))
5910 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5913 /* If the index has a self-referential type, pass it to a
5914 WITH_RECORD_EXPR; if the component size does, pass our
5915 component to one. */
5916 if (CONTAINS_PLACEHOLDER_P (index))
5917 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5918 if (CONTAINS_PLACEHOLDER_P (unit_size))
5919 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5921 offset = size_binop (PLUS_EXPR, offset,
5922 size_binop (MULT_EXPR,
5923 convert (sizetype, index),
5927 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5929 tree new = find_placeholder (exp, &placeholder_ptr);
5931 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5932 We might have been called from tree optimization where we
5933 haven't set up an object yet. */
5942 /* We can go inside most conversions: all NON_LVALUE_EXPRs, all normal
5943 conversions that don't change the mode, and all view conversions
5944 except those that need to "step up" the alignment. */
5945 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5946 && ! (TREE_CODE (exp) == VIEW_CONVERT_EXPR
5947 && ! ((TYPE_ALIGN (TREE_TYPE (exp))
5948 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5950 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5951 < BIGGEST_ALIGNMENT)
5952 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5953 || TYPE_ALIGN_OK (TREE_TYPE
5954 (TREE_OPERAND (exp, 0))))))
5955 && ! ((TREE_CODE (exp) == NOP_EXPR
5956 || TREE_CODE (exp) == CONVERT_EXPR)
5957 && (TYPE_MODE (TREE_TYPE (exp))
5958 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5961 /* If any reference in the chain is volatile, the effect is volatile. */
5962 if (TREE_THIS_VOLATILE (exp))
5965 exp = TREE_OPERAND (exp, 0);
5968 /* If OFFSET is constant, see if we can return the whole thing as a
5969 constant bit position. Otherwise, split it up. */
5970 if (host_integerp (offset, 0)
5971 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5973 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5974 && host_integerp (tem, 0))
5975 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5977 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
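/* Illustrative sketch, not part of this file: the final split above.
   The walk accumulates a byte offset (OFFSET) and a bit offset
   (BIT_OFFSET); when the byte part is constant the two fold into one
   constant bit position, otherwise the byte part stays variable.
   Hypothetical types and helper, with 8 standing for BITS_PER_UNIT:  */
struct position { long bitpos; long offset; };

static struct position
split_position (long byte_offset, long bit_offset, int offset_is_constant)
{
  struct position p;

  if (offset_is_constant)
    {
      p.bitpos = byte_offset * 8 + bit_offset;  /* all of it in *PBITPOS */
      p.offset = 0;                             /* *POFFSET = 0 */
    }
  else
    {
      p.bitpos = bit_offset;                    /* bit part in *PBITPOS */
      p.offset = byte_offset;                   /* byte part in *POFFSET */
    }
  return p;
}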
5983 /* Return 1 if T is an expression that get_inner_reference handles. */
5986 handled_component_p (t)
5989 switch (TREE_CODE (t))
5994 case ARRAY_RANGE_REF:
5995 case NON_LVALUE_EXPR:
5996 case VIEW_CONVERT_EXPR:
6001 return (TYPE_MODE (TREE_TYPE (t))
6002 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
6009 /* Given an rtx VALUE that may contain additions and multiplications, return
6010 an equivalent value that just refers to a register, memory, or constant.
6011 This is done by generating instructions to perform the arithmetic and
6012 returning a pseudo-register containing the value.
6014 The returned value may be a REG, SUBREG, MEM or constant. */
6017 force_operand (value, target)
6021 /* Use subtarget as the target for operand 0 of a binary operation. */
6022 rtx subtarget = get_subtarget (target);
6023 enum rtx_code code = GET_CODE (value);
6025 /* Check for a PIC address load. */
6026 if ((code == PLUS || code == MINUS)
6027 && XEXP (value, 0) == pic_offset_table_rtx
6028 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
6029 || GET_CODE (XEXP (value, 1)) == LABEL_REF
6030 || GET_CODE (XEXP (value, 1)) == CONST))
6033 subtarget = gen_reg_rtx (GET_MODE (value));
6034 emit_move_insn (subtarget, value);
6038 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
6041 target = gen_reg_rtx (GET_MODE (value));
6042 convert_move (target, force_operand (XEXP (value, 0), NULL),
6043 code == ZERO_EXTEND);
6047 if (GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
6049 op2 = XEXP (value, 1);
6050 if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
6052 if (code == MINUS && GET_CODE (op2) == CONST_INT)
6055 op2 = negate_rtx (GET_MODE (value), op2);
6058 /* Check for an addition with OP2 a constant integer and our first
6059 operand a PLUS of a virtual register and something else. In that
6060 case, we want to emit the sum of the virtual register and the
6061 constant first and then add the other value. This allows virtual
6062 register instantiation to simply modify the constant rather than
6063 creating another one around this addition. */
6064 if (code == PLUS && GET_CODE (op2) == CONST_INT
6065 && GET_CODE (XEXP (value, 0)) == PLUS
6066 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
6067 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
6068 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
6070 rtx temp = expand_simple_binop (GET_MODE (value), code,
6071 XEXP (XEXP (value, 0), 0), op2,
6072 subtarget, 0, OPTAB_LIB_WIDEN);
6073 return expand_simple_binop (GET_MODE (value), code, temp,
6074 force_operand (XEXP (XEXP (value,
6076 target, 0, OPTAB_LIB_WIDEN);
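/* Illustrative sketch, not part of this file: the reassociation above.
   For (virt_reg + other) + c, the sum virt_reg + c is emitted first so
   that virtual register instantiation can simply fold C into the
   register's displacement instead of creating another addition.
   Hypothetical helper:  */
static long
force_virtual_plus (long virt_reg, long other, long c)
{
  long temp = virt_reg + c;  /* expand_simple_binop (PLUS, vreg, op2) */

  return temp + other;       /* then add the remaining operand */
}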
6079 op1 = force_operand (XEXP (value, 0), subtarget);
6080 op2 = force_operand (op2, NULL_RTX);
6084 return expand_mult (GET_MODE (value), op1, op2, target, 1);
6086 if (!INTEGRAL_MODE_P (GET_MODE (value)))
6087 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6088 target, 1, OPTAB_LIB_WIDEN);
6090 return expand_divmod (0,
6091 FLOAT_MODE_P (GET_MODE (value))
6092 ? RDIV_EXPR : TRUNC_DIV_EXPR,
6093 GET_MODE (value), op1, op2, target, 0);
6096 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6100 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
6104 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6108 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6109 target, 0, OPTAB_LIB_WIDEN);
6112 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6113 target, 1, OPTAB_LIB_WIDEN);
6116 if (GET_RTX_CLASS (code) == '1')
6118 op1 = force_operand (XEXP (value, 0), NULL_RTX);
6119 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
6122 #ifdef INSN_SCHEDULING
6123 /* On machines that have insn scheduling, we want all memory reference to be
6124 explicit, so we need to deal with such paradoxical SUBREGs. */
6125 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
6126 && (GET_MODE_SIZE (GET_MODE (value))
6127 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
6129 = simplify_gen_subreg (GET_MODE (value),
6130 force_reg (GET_MODE (SUBREG_REG (value)),
6131 force_operand (SUBREG_REG (value),
6133 GET_MODE (SUBREG_REG (value)),
6134 SUBREG_BYTE (value));
6140 /* Subroutine of expand_expr: return nonzero iff there is no way that
6141 EXP can reference X, which is being modified. TOP_P is nonzero if this
6142 call is going to be used to determine whether we need a temporary
6143 for EXP, as opposed to a recursive call to this function.
6145 It is always safe for this routine to return zero since it merely
6146 searches for optimization opportunities. */
6149 safe_from_p (x, exp, top_p)
6156 static tree save_expr_list;
6159 /* If EXP has varying size, we MUST use a target since we currently
6160 have no way of allocating temporaries of variable size
6161 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6162 So we assume here that something at a higher level has prevented a
6163 clash. This is somewhat bogus, but the best we can do. Only
6164 do this when X is BLKmode and when we are at the top level. */
6165 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6166 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
6167 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
6168 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
6169 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
6171 && GET_MODE (x) == BLKmode)
6172 /* If X is in the outgoing argument area, it is always safe. */
6173 || (GET_CODE (x) == MEM
6174 && (XEXP (x, 0) == virtual_outgoing_args_rtx
6175 || (GET_CODE (XEXP (x, 0)) == PLUS
6176 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
6179 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
6180 find the underlying pseudo. */
6181 if (GET_CODE (x) == SUBREG)
6184 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6188 /* A SAVE_EXPR might appear many times in the expression passed to the
6189 top-level safe_from_p call, and if it has a complex subexpression,
6190 examining it multiple times could result in a combinatorial explosion.
6191 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
6192 with optimization took about 28 minutes to compile -- even though it was
6193 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
6194 and turn that off when we are done. We keep a list of the SAVE_EXPRs
6195 we have processed. Note that the only test of top_p was above. */
6204 rtn = safe_from_p (x, exp, 0);
6206 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
6207 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
6212 /* Now look at our tree code and possibly recurse. */
6213 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6216 exp_rtl = DECL_RTL_IF_SET (exp);
6223 if (TREE_CODE (exp) == TREE_LIST)
6227 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
6229 exp = TREE_CHAIN (exp);
6232 if (TREE_CODE (exp) != TREE_LIST)
6233 return safe_from_p (x, exp, 0);
6236 else if (TREE_CODE (exp) == ERROR_MARK)
6237 return 1; /* An already-visited SAVE_EXPR? */
6243 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6248 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6252 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6253 the expression. If it is set, we conflict iff we are that rtx or
6254 both are in memory. Otherwise, we check all operands of the
6255 expression recursively. */
6257 switch (TREE_CODE (exp))
6260 /* If the operand is static or we are static, we can't conflict.
6261 Likewise if we don't conflict with the operand at all. */
6262 if (staticp (TREE_OPERAND (exp, 0))
6263 || TREE_STATIC (exp)
6264 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6267 /* Otherwise, the only way this can conflict is if we are taking
6268 the address of a DECL and that address is part of X, which is
6270 exp = TREE_OPERAND (exp, 0);
6273 if (!DECL_RTL_SET_P (exp)
6274 || GET_CODE (DECL_RTL (exp)) != MEM)
6277 exp_rtl = XEXP (DECL_RTL (exp), 0);
6282 if (GET_CODE (x) == MEM
6283 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6284 get_alias_set (exp)))
6289 /* Assume that the call will clobber all hard registers and
6291 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6292 || GET_CODE (x) == MEM)
6297 /* If a sequence exists, we would have to scan every instruction
6298 in the sequence to see if it was safe. This is probably not
6300 if (RTL_EXPR_SEQUENCE (exp))
6303 exp_rtl = RTL_EXPR_RTL (exp);
6306 case WITH_CLEANUP_EXPR:
6307 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
6310 case CLEANUP_POINT_EXPR:
6311 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6314 exp_rtl = SAVE_EXPR_RTL (exp);
6318 /* If we've already scanned this, don't do it again. Otherwise,
6319 show we've scanned it and record for clearing the flag if we're
6321 if (TREE_PRIVATE (exp))
6324 TREE_PRIVATE (exp) = 1;
6325 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6327 TREE_PRIVATE (exp) = 0;
6331 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
6335 /* The only operand we look at is operand 1. The rest aren't
6336 part of the expression. */
6337 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
6339 case METHOD_CALL_EXPR:
6340 /* This takes an rtx argument, but shouldn't appear here. */
6347 /* If we have an rtx, we do not need to scan our operands. */
6351 nops = first_rtl_op (TREE_CODE (exp));
6352 for (i = 0; i < nops; i++)
6353 if (TREE_OPERAND (exp, i) != 0
6354 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6357 /* If this is a language-specific tree code, it may require
6358 special handling. */
6359 if ((unsigned int) TREE_CODE (exp)
6360 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
6361 && !(*lang_hooks.safe_from_p) (x, exp))
6365 /* If we have an rtl, find any enclosed object. Then see if we conflict
6369 if (GET_CODE (exp_rtl) == SUBREG)
6371 exp_rtl = SUBREG_REG (exp_rtl);
6372 if (GET_CODE (exp_rtl) == REG
6373 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6377 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6378 are memory and they conflict. */
6379 return ! (rtx_equal_p (x, exp_rtl)
6380 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
6381 && true_dependence (exp_rtl, VOIDmode, x,
6382 rtx_addr_varies_p)));
6385 /* If we reach here, it is safe. */
6389 /* Subroutine of expand_expr: return rtx if EXP is a
6390 variable or parameter; else return 0. */
6397 switch (TREE_CODE (exp))
6401 return DECL_RTL (exp);
6407 #ifdef MAX_INTEGER_COMPUTATION_MODE
6410 check_max_integer_computation_mode (exp)
6413 enum tree_code code;
6414 enum machine_mode mode;
6416 /* Strip any NOPs that don't change the mode. */
6418 code = TREE_CODE (exp);
6420 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
6421 if (code == NOP_EXPR
6422 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
6425 /* First check the type of the overall operation. We need only look at
6426 unary, binary and relational operations. */
6427 if (TREE_CODE_CLASS (code) == '1'
6428 || TREE_CODE_CLASS (code) == '2'
6429 || TREE_CODE_CLASS (code) == '<')
6431 mode = TYPE_MODE (TREE_TYPE (exp));
6432 if (GET_MODE_CLASS (mode) == MODE_INT
6433 && mode > MAX_INTEGER_COMPUTATION_MODE)
6434 internal_error ("unsupported wide integer operation");
6437 /* Check operand of a unary op. */
6438 if (TREE_CODE_CLASS (code) == '1')
6440 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6441 if (GET_MODE_CLASS (mode) == MODE_INT
6442 && mode > MAX_INTEGER_COMPUTATION_MODE)
6443 internal_error ("unsupported wide integer operation");
6446 /* Check operands of a binary/comparison op. */
6447 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
6449 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6450 if (GET_MODE_CLASS (mode) == MODE_INT
6451 && mode > MAX_INTEGER_COMPUTATION_MODE)
6452 internal_error ("unsupported wide integer operation");
6454 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
6455 if (GET_MODE_CLASS (mode) == MODE_INT
6456 && mode > MAX_INTEGER_COMPUTATION_MODE)
6457 internal_error ("unsupported wide integer operation");
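/* Illustrative sketch (hypothetical target, not from any real port): a
   target whose widest supported integer arithmetic is 32 bits would put

       #define MAX_INTEGER_COMPUTATION_MODE SImode

   in its target header, after which any DImode unary, binary or
   relational operation reaching the checks above is reported through
   internal_error.  */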
6462 /* Return the highest power of two that EXP is known to be a multiple of.
6463 This is used in updating alignment of MEMs in array references. */
static unsigned HOST_WIDE_INT
highest_pow2_factor (exp)
     tree exp;
{
  unsigned HOST_WIDE_INT c0, c1;
6471 switch (TREE_CODE (exp))
    {
    case INTEGER_CST:
      /* We can find the lowest bit that's a one.  If the low
         HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
         We need to handle this case since we can find it in a COND_EXPR,
         a MIN_EXPR, or a MAX_EXPR.  If the constant overflows, we have an
         erroneous program, so return BIGGEST_ALIGNMENT to avoid any
         later ICE.  */
6480 if (TREE_CONSTANT_OVERFLOW (exp))
6481 return BIGGEST_ALIGNMENT;
6484 /* Note: tree_low_cst is intentionally not used here,
6485 we don't care about the upper bits. */
          c0 = TREE_INT_CST_LOW (exp);
          c0 &= -c0;            /* Isolate the lowest set bit.  */
          return c0 ? c0 : BIGGEST_ALIGNMENT;
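          /* Worked example (illustrative): for the constant 24 (binary
             11000), `c0 &= -c0' leaves c0 == 8 under two's complement,
             so anything known to be a multiple of 24 is also known to
             be a multiple of 8.  */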
6492 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6493 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6494 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6495 return MIN (c0, c1);
    case MULT_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      return c0 * c1;
6502 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6504 if (integer_pow2p (TREE_OPERAND (exp, 1))
          && host_integerp (TREE_OPERAND (exp, 1), 1))
        {
          c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
          c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
          return MAX (1, c0 / c1);
        }
      break;
6513 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6514 case SAVE_EXPR: case WITH_RECORD_EXPR:
6515 return highest_pow2_factor (TREE_OPERAND (exp, 0));
    case COMPOUND_EXPR:
      return highest_pow2_factor (TREE_OPERAND (exp, 1));

    case COND_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
      return MIN (c0, c1);

    default:
      break;
    }

  return 1;
}
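/* Worked example (illustrative, not from the sources): for an offset
   expression `i * 12 + 8' with `i' unknown (factor 1), the MULT_EXPR
   case yields 1 * 4 == 4 (since 12 == 4 * 3) and the PLUS_EXPR case
   yields MIN (4, 8) == 4, so a MEM at that offset may be marked
   4-byte aligned.  */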
6532 /* Similar, except that it is known that the expression must be a multiple
6533 of the alignment of TYPE. */
static unsigned HOST_WIDE_INT
highest_pow2_factor_for_type (type, exp)
     tree type;
     tree exp;
{
  unsigned HOST_WIDE_INT type_align, factor;
6542 factor = highest_pow2_factor (exp);
6543 type_align = TYPE_ALIGN (type) / BITS_PER_UNIT;
6544 return MAX (factor, type_align);
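/* Worked example (illustrative): if TYPE is an `int' aligned to 4 bytes
   and EXP is `i * 2' (factor 2), the result is MAX (2, 4) == 4; the
   expression must be a multiple of the type's alignment even when the
   arithmetic alone does not show it.  */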
6547 /* Return an object on the placeholder list that matches EXP, a
6548 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
6549 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
6550 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
6551 is a location which initially points to a starting location in the
6552 placeholder list (zero means start of the list) and where a pointer into
6553 the placeholder list at which the object is found is placed. */
tree
find_placeholder (exp, plist)
     tree exp;
     tree *plist;
{
  tree type = TREE_TYPE (exp);
6561 tree placeholder_expr;
6563 for (placeholder_expr
6564 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
6565 placeholder_expr != 0;
6566 placeholder_expr = TREE_CHAIN (placeholder_expr))
    {
      tree need_type = TYPE_MAIN_VARIANT (type);
      tree elt;
      /* Find the outermost reference that is of the type we want.  If none,
         see if any object has a type that is a pointer to the type we
         want.  */
6574 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6575 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6576 || TREE_CODE (elt) == COND_EXPR)
6577 ? TREE_OPERAND (elt, 1)
6578 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6579 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6580 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6581 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6582 ? TREE_OPERAND (elt, 0) : 0))
        if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
          {
            if (plist)
              *plist = placeholder_expr;
            return elt;
          }
6590 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
           elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6593 || TREE_CODE (elt) == COND_EXPR)
6594 ? TREE_OPERAND (elt, 1)
6595 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6596 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6597 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6598 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6599 ? TREE_OPERAND (elt, 0) : 0))
6600 if (POINTER_TYPE_P (TREE_TYPE (elt))
            && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
                == need_type))
          {
            if (plist)
              *plist = placeholder_expr;
            return build1 (INDIRECT_REF, need_type, elt);
          }
    }

  return 0;
}
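/* Illustrative sketch (hypothetical trees, not from the sources): a
   self-referential size, as in Ada, embeds a PLACEHOLDER_EXPR in the
   type's size expression, and WITH_RECORD_EXPR later supplies the
   object so the walk above can resolve it:

       size_expr = ... PLACEHOLDER_EXPR of struct S ...;
       exp = build (WITH_RECORD_EXPR, sizetype, size_expr, the_object);

   Expanding EXP pushes THE_OBJECT onto placeholder_list, and
   find_placeholder returns it when the PLACEHOLDER_EXPR is reached.  */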
6613 /* expand_expr: generate code for computing expression EXP.
6614 An rtx for the computed value is returned. The value is never null.
6615 In the case of a void EXP, const0_rtx is returned.
6617 The value may be stored in TARGET if TARGET is nonzero.
6618 TARGET is just a suggestion; callers must assume that
6619 the rtx returned may not be the same as TARGET.
6621 If TARGET is CONST0_RTX, it means that the value will be ignored.
6623 If TMODE is not VOIDmode, it suggests generating the
6624 result in mode TMODE. But this is done only when convenient.
   Otherwise, TMODE is ignored and the value is generated in its natural mode.
6626 TMODE is just a suggestion; callers must assume that
6627 the rtx returned may not have mode TMODE.
6629 Note that TARGET may have neither TMODE nor MODE. In that case, it
6630 probably will not be used.
6632 If MODIFIER is EXPAND_SUM then when EXP is an addition
6633 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6634 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6635 products as above, or REG or MEM, or constant.
6636 Ordinarily in such cases we would output mul or add instructions
6637 and then return a pseudo reg containing the sum.
6639 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6640 it also marks a label as absolutely required (it can't be dead).
6641 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6642 This is used for outputting expressions used in initializers.
6644 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6645 with a constant address even if that address is not normally legitimate.
6646 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6648 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6649 a call parameter. Such targets require special care as we haven't yet
6650 marked TARGET so that it's safe from being trashed by libcalls. We
   don't want to use TARGET for anything but the final result;
   intermediate values must go elsewhere.  Additionally, calls to
6653 emit_block_move will be flagged with BLOCK_OP_CALL_PARM. */
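/* Illustrative sketch (not part of the sources): typical invocations
   of expand_expr using the modifiers described above.  */
#if 0
  /* Value wanted somewhere convenient, in its natural mode: */
  temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);

  /* Evaluate only for side effects; the value itself is ignored: */
  expand_expr (exp, const0_rtx, VOIDmode, 0);

  /* Address arithmetic where an unreduced (plus ...) is acceptable: */
  addr = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_SUM);
#endif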
rtx
expand_expr (exp, target, tmode, modifier)
     tree exp;
     rtx target;
     enum machine_mode tmode;
6660 enum expand_modifier modifier;
6663 tree type = TREE_TYPE (exp);
6664 int unsignedp = TREE_UNSIGNED (type);
6665 enum machine_mode mode;
6666 enum tree_code code = TREE_CODE (exp);
6668 rtx subtarget, original_target;
6672 /* Handle ERROR_MARK before anybody tries to access its type. */
6673 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
    {
      op0 = CONST0_RTX (tmode);
      if (op0 != 0)
        return op0;
      return const0_rtx;
    }
6681 mode = TYPE_MODE (type);
6682 /* Use subtarget as the target for operand 0 of a binary operation. */
6683 subtarget = get_subtarget (target);
6684 original_target = target;
6685 ignore = (target == const0_rtx
6686 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6687 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6688 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6689 && TREE_CODE (type) == VOID_TYPE));
6691 /* If we are going to ignore this result, we need only do something
6692 if there is a side-effect somewhere in the expression. If there
6693 is, short-circuit the most common cases here. Note that we must
6694 not call expand_expr with anything but const0_rtx in case this
6695 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
  if (ignore)
    {
      if (! TREE_SIDE_EFFECTS (exp))
        return const0_rtx;
6702 /* Ensure we reference a volatile object even if value is ignored, but
6703 don't do this if all we are doing is taking its address. */
6704 if (TREE_THIS_VOLATILE (exp)
6705 && TREE_CODE (exp) != FUNCTION_DECL
6706 && mode != VOIDmode && mode != BLKmode
6707 && modifier != EXPAND_CONST_ADDRESS)
        {
          temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
          if (GET_CODE (temp) == MEM)
            temp = copy_to_reg (temp);
          return const0_rtx;
        }
6715 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6716 || code == INDIRECT_REF || code == BUFFER_REF)
        return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
                            modifier);
6720 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6721 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6723 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6724 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6727 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6728 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
        /* If the second operand has no side effects, just evaluate
           the first.  */
        return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
                            modifier);
6733 else if (code == BIT_FIELD_REF)
6735 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6736 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
          expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
          return const0_rtx;
        }

      target = 0;
    }
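  /* Illustrative note (not from the sources): for a discarded expression
     such as `(void) (f () + g ());' the code above expands each operand
     with const0_rtx as the target, so the calls' side effects are emitted
     but no sum is ever computed.  */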
6744 #ifdef MAX_INTEGER_COMPUTATION_MODE
6745 /* Only check stuff here if the mode we want is different from the mode
6746 of the expression; if it's the same, check_max_integer_computation_mode
6747 will handle it. Do we really need to check this stuff at all? */
6750 && GET_MODE (target) != mode
6751 && TREE_CODE (exp) != INTEGER_CST
6752 && TREE_CODE (exp) != PARM_DECL
6753 && TREE_CODE (exp) != ARRAY_REF
6754 && TREE_CODE (exp) != ARRAY_RANGE_REF
6755 && TREE_CODE (exp) != COMPONENT_REF
6756 && TREE_CODE (exp) != BIT_FIELD_REF
6757 && TREE_CODE (exp) != INDIRECT_REF
6758 && TREE_CODE (exp) != CALL_EXPR
6759 && TREE_CODE (exp) != VAR_DECL
6760 && TREE_CODE (exp) != RTL_EXPR)
6762 enum machine_mode mode = GET_MODE (target);
6764 if (GET_MODE_CLASS (mode) == MODE_INT
6765 && mode > MAX_INTEGER_COMPUTATION_MODE)
6766 internal_error ("unsupported wide integer operation");
6770 && TREE_CODE (exp) != INTEGER_CST
6771 && TREE_CODE (exp) != PARM_DECL
6772 && TREE_CODE (exp) != ARRAY_REF
6773 && TREE_CODE (exp) != ARRAY_RANGE_REF
6774 && TREE_CODE (exp) != COMPONENT_REF
6775 && TREE_CODE (exp) != BIT_FIELD_REF
6776 && TREE_CODE (exp) != INDIRECT_REF
6777 && TREE_CODE (exp) != VAR_DECL
6778 && TREE_CODE (exp) != CALL_EXPR
6779 && TREE_CODE (exp) != RTL_EXPR
6780 && GET_MODE_CLASS (tmode) == MODE_INT
6781 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6782 internal_error ("unsupported wide integer operation");
6784 check_max_integer_computation_mode (exp);
  /* If we will do cse, generate all results into pseudo registers
6788 since 1) that allows cse to find more things
6789 and 2) otherwise cse could produce an insn the machine
6790 cannot support. An exception is a CONSTRUCTOR into a multi-word
6791 MEM: that's much more likely to be most efficient into the MEM.
6792 Another is a CALL_EXPR which must return in memory. */
6794 if (! cse_not_expected && mode != BLKmode && target
6795 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6796 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
6797 && ! (code == CALL_EXPR && aggregate_value_p (exp)))
6804 tree function = decl_function_context (exp);
        /* Labels in containing functions, or labels used from initializers,
           must be forced.  */
6807 if (modifier == EXPAND_INITIALIZER
6808 || (function != current_function_decl
6809 && function != inline_function_decl
6811 temp = force_label_rtx (exp);
6813 temp = label_rtx (exp);
6815 temp = gen_rtx_MEM (FUNCTION_MODE, gen_rtx_LABEL_REF (Pmode, temp));
6816 if (function != current_function_decl
6817 && function != inline_function_decl && function != 0)
6818 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6823 if (!DECL_RTL_SET_P (exp))
6825 error_with_decl (exp, "prior parameter's size depends on `%s'");
6826 return CONST0_RTX (mode);
6829 /* ... fall through ... */
6832 /* If a static var's type was incomplete when the decl was written,
6833 but the type is complete now, lay out the decl now. */
6834 if (DECL_SIZE (exp) == 0
6835 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6836 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6837 layout_decl (exp, 0);
6839 /* ... fall through ... */
6843 if (DECL_RTL (exp) == 0)
      /* Ensure variable marked as used even if it doesn't go through
         a parser.  If it hasn't been used yet, write out an external
         definition.  */
6849 if (! TREE_USED (exp))
6851 assemble_external (exp);
6852 TREE_USED (exp) = 1;
      /* Show we haven't gotten RTL for this yet.  */
      temp = 0;
6858 /* Handle variables inherited from containing functions. */
6859 context = decl_function_context (exp);
6861 /* We treat inline_function_decl as an alias for the current function
6862 because that is the inline function whose vars, types, etc.
6863 are being merged into the current function.
6864 See expand_inline_function. */
6866 if (context != 0 && context != current_function_decl
6867 && context != inline_function_decl
6868 /* If var is static, we don't need a static chain to access it. */
6869 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6870 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6874 /* Mark as non-local and addressable. */
6875 DECL_NONLOCAL (exp) = 1;
6876 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6878 (*lang_hooks.mark_addressable) (exp);
6879 if (GET_CODE (DECL_RTL (exp)) != MEM)
6881 addr = XEXP (DECL_RTL (exp), 0);
6882 if (GET_CODE (addr) == MEM)
6884 = replace_equiv_address (addr,
6885 fix_lexical_addr (XEXP (addr, 0), exp));
6887 addr = fix_lexical_addr (addr, exp);
6889 temp = replace_equiv_address (DECL_RTL (exp), addr);
      /* This is the case of an array whose size is to be determined
         from its initializer, while the initializer is still being parsed.
         See expand_decl.  */
6896 else if (GET_CODE (DECL_RTL (exp)) == MEM
6897 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6898 temp = validize_mem (DECL_RTL (exp));
6900 /* If DECL_RTL is memory, we are in the normal case and either
6901 the address is not valid or it is not a register and -fforce-addr
6902 is specified, get the address into a register. */
6904 else if (GET_CODE (DECL_RTL (exp)) == MEM
6905 && modifier != EXPAND_CONST_ADDRESS
6906 && modifier != EXPAND_SUM
6907 && modifier != EXPAND_INITIALIZER
6908 && (! memory_address_p (DECL_MODE (exp),
6909 XEXP (DECL_RTL (exp), 0))
6911 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6912 temp = replace_equiv_address (DECL_RTL (exp),
6913 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6915 /* If we got something, return it. But first, set the alignment
6916 if the address is a register. */
6919 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6920 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6925 /* If the mode of DECL_RTL does not match that of the decl, it
6926 must be a promoted value. We return a SUBREG of the wanted mode,
6927 but mark it so that we know that it was already extended. */
6929 if (GET_CODE (DECL_RTL (exp)) == REG
6930 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6932 /* Get the signedness used for this variable. Ensure we get the
6933 same mode we got when the variable was declared. */
6934 if (GET_MODE (DECL_RTL (exp))
6935 != promote_mode (type, DECL_MODE (exp), &unsignedp,
6936 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6939 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6940 SUBREG_PROMOTED_VAR_P (temp) = 1;
6941 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6945 return DECL_RTL (exp);
6948 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6949 TREE_INT_CST_HIGH (exp), mode);
6951 /* ??? If overflow is set, fold will have done an incomplete job,
6952 which can result in (plus xx (const_int 0)), which can get
6953 simplified by validate_replace_rtx during virtual register
6954 instantiation, which can result in unrecognizable insns.
6955 Avoid this by forcing all overflows into registers. */
6956 if (TREE_CONSTANT_OVERFLOW (exp)
6957 && modifier != EXPAND_INITIALIZER)
6958 temp = force_reg (mode, temp);
6963 return const_vector_from_tree (exp);
6966 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6969 /* If optimized, generate immediate CONST_DOUBLE
6970 which will be turned into memory by reload if necessary.
6972 We used to force a register so that loop.c could see it. But
6973 this does not allow gen_* patterns to perform optimizations with
6974 the constants. It also produces two insns in cases like "x = 1.0;".
6975 On most machines, floating-point constants are not permitted in
6976 many insns, so we'd end up copying it to a register in any case.
6978 Now, we do the copying in expand_binop, if appropriate. */
6979 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6980 TYPE_MODE (TREE_TYPE (exp)));
6984 temp = output_constant_def (exp, 1);
6986 /* temp contains a constant address.
6987 On RISC machines where a constant address isn't valid,
6988 make some insns to get that address into a register. */
6989 if (modifier != EXPAND_CONST_ADDRESS
6990 && modifier != EXPAND_INITIALIZER
6991 && modifier != EXPAND_SUM
6992 && (! memory_address_p (mode, XEXP (temp, 0))
6993 || flag_force_addr))
6994 return replace_equiv_address (temp,
6995 copy_rtx (XEXP (temp, 0)));
6998 case EXPR_WITH_FILE_LOCATION:
7001 location_t saved_loc = input_location;
7002 input_filename = EXPR_WFL_FILENAME (exp);
7003 input_line = EXPR_WFL_LINENO (exp);
7004 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
7005 emit_line_note (input_filename, input_line);
7006 /* Possibly avoid switching back and forth here. */
7007 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
7008 input_location = saved_loc;
7013 context = decl_function_context (exp);
7015 /* If this SAVE_EXPR was at global context, assume we are an
7016 initialization function and move it into our context. */
7018 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
7020 /* We treat inline_function_decl as an alias for the current function
7021 because that is the inline function whose vars, types, etc.
7022 are being merged into the current function.
7023 See expand_inline_function. */
7024 if (context == current_function_decl || context == inline_function_decl)
7027 /* If this is non-local, handle it. */
7030 /* The following call just exists to abort if the context is
7031 not of a containing function. */
7032 find_function_data (context);
7034 temp = SAVE_EXPR_RTL (exp);
7035 if (temp && GET_CODE (temp) == REG)
7037 put_var_into_stack (exp, /*rescan=*/true);
7038 temp = SAVE_EXPR_RTL (exp);
          if (temp == 0 || GET_CODE (temp) != MEM)
            abort ();
          return
            replace_equiv_address (temp,
7044 fix_lexical_addr (XEXP (temp, 0), exp));
7046 if (SAVE_EXPR_RTL (exp) == 0)
7048 if (mode == VOIDmode)
7051 temp = assign_temp (build_qualified_type (type,
7053 | TYPE_QUAL_CONST)),
7056 SAVE_EXPR_RTL (exp) = temp;
7057 if (!optimize && GET_CODE (temp) == REG)
7058 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
          /* If the mode of TEMP does not match that of the expression, it
             must be a promoted value.  We pass store_expr a SUBREG of the
             wanted mode but mark it so that we know that it was already
             extended.  */
7066 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
7068 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
7069 promote_mode (type, mode, &unsignedp, 0);
7070 SUBREG_PROMOTED_VAR_P (temp) = 1;
7071 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
7074 if (temp == const0_rtx)
7075 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7077 store_expr (TREE_OPERAND (exp, 0), temp,
7078 modifier == EXPAND_STACK_PARM ? 2 : 0);
7080 TREE_USED (exp) = 1;
7083 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
7084 must be a promoted value. We return a SUBREG of the wanted mode,
7085 but mark it so that we know that it was already extended. */
7087 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
7088 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
7090 /* Compute the signedness and make the proper SUBREG. */
7091 promote_mode (type, mode, &unsignedp, 0);
7092 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
7093 SUBREG_PROMOTED_VAR_P (temp) = 1;
7094 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
7098 return SAVE_EXPR_RTL (exp);
7103 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7104 TREE_OPERAND (exp, 0)
7105 = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
7109 case PLACEHOLDER_EXPR:
7111 tree old_list = placeholder_list;
7112 tree placeholder_expr = 0;
7114 exp = find_placeholder (exp, &placeholder_expr);
7118 placeholder_list = TREE_CHAIN (placeholder_expr);
7119 temp = expand_expr (exp, original_target, tmode, modifier);
7120 placeholder_list = old_list;
7124 case WITH_RECORD_EXPR:
7125 /* Put the object on the placeholder list, expand our first operand,
7126 and pop the list. */
7127 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
7129 target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
7131 placeholder_list = TREE_CHAIN (placeholder_list);
7135 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
7136 expand_goto (TREE_OPERAND (exp, 0));
7138 expand_computed_goto (TREE_OPERAND (exp, 0));
7142 expand_exit_loop_if_false (NULL,
7143 invert_truthvalue (TREE_OPERAND (exp, 0)));
7146 case LABELED_BLOCK_EXPR:
7147 if (LABELED_BLOCK_BODY (exp))
7148 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
7149 /* Should perhaps use expand_label, but this is simpler and safer. */
7150 do_pending_stack_adjust ();
7151 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
7154 case EXIT_BLOCK_EXPR:
7155 if (EXIT_BLOCK_RETURN (exp))
7156 sorry ("returned value in block_exit_expr");
7157 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
7162 expand_start_loop (1);
7163 expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
7171 tree vars = TREE_OPERAND (exp, 0);
7173 /* Need to open a binding contour here because
7174 if there are any cleanups they must be contained here. */
7175 expand_start_bindings (2);
7177 /* Mark the corresponding BLOCK for output in its proper place. */
7178 if (TREE_OPERAND (exp, 2) != 0
7179 && ! TREE_USED (TREE_OPERAND (exp, 2)))
7180 (*lang_hooks.decls.insert_block) (TREE_OPERAND (exp, 2));
        /* If VARS have not yet been expanded, expand them now.  */
        while (vars)
          {
            if (!DECL_RTL_SET_P (vars))
              expand_decl (vars);
            expand_decl_init (vars);
            vars = TREE_CHAIN (vars);
          }
7191 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
7193 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
7199 if (RTL_EXPR_SEQUENCE (exp))
7201 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
7203 emit_insn (RTL_EXPR_SEQUENCE (exp));
7204 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
7206 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
7207 free_temps_for_rtl_expr (exp);
7208 return RTL_EXPR_RTL (exp);
      /* If we don't need the result, just ensure we evaluate any
         subexpressions.  */
7217 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
7218 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
7223 /* All elts simple constants => refer to a constant in memory. But
7224 if this is a non-BLKmode mode, let it store a field at a time
7225 since that should make a CONST_INT or CONST_DOUBLE when we
7226 fold. Likewise, if we have a target we can use, it is best to
7227 store directly into the target unless the type is large enough
7228 that memcpy will be used. If we are making an initializer and
7229 all operands are constant, put it in memory as well.
7231 FIXME: Avoid trying to fill vector constructors piece-meal.
7232 Output them with output_constant_def below unless we're sure
7233 they're zeros. This should go away when vector initializers
         are treated like VECTOR_CST instead of arrays.  */
7236 else if ((TREE_STATIC (exp)
7237 && ((mode == BLKmode
7238 && ! (target != 0 && safe_from_p (target, exp, 1)))
7239 || TREE_ADDRESSABLE (exp)
7240 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7241 && (! MOVE_BY_PIECES_P
7242 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7244 && ((TREE_CODE (type) == VECTOR_TYPE
7245 && !is_zeros_p (exp))
7246 || ! mostly_zeros_p (exp)))))
7247 || ((modifier == EXPAND_INITIALIZER
7248 || modifier == EXPAND_CONST_ADDRESS)
7249 && TREE_CONSTANT (exp)))
7251 rtx constructor = output_constant_def (exp, 1);
7253 if (modifier != EXPAND_CONST_ADDRESS
7254 && modifier != EXPAND_INITIALIZER
7255 && modifier != EXPAND_SUM)
7256 constructor = validize_mem (constructor);
7262 /* Handle calls that pass values in multiple non-contiguous
7263 locations. The Irix 6 ABI has examples of this. */
7264 if (target == 0 || ! safe_from_p (target, exp, 1)
7265 || GET_CODE (target) == PARALLEL
7266 || modifier == EXPAND_STACK_PARM)
7268 = assign_temp (build_qualified_type (type,
7270 | (TREE_READONLY (exp)
7271 * TYPE_QUAL_CONST))),
7272 0, TREE_ADDRESSABLE (exp), 1);
7274 store_constructor (exp, target, 0, int_expr_size (exp));
7280 tree exp1 = TREE_OPERAND (exp, 0);
7282 tree string = string_constant (exp1, &index);
7284 /* Try to optimize reads from const strings. */
7286 && TREE_CODE (string) == STRING_CST
7287 && TREE_CODE (index) == INTEGER_CST
7288 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
7289 && GET_MODE_CLASS (mode) == MODE_INT
7290 && GET_MODE_SIZE (mode) == 1
7291 && modifier != EXPAND_WRITE)
7292 return gen_int_mode (TREE_STRING_POINTER (string)
7293 [TREE_INT_CST_LOW (index)], mode);
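        /* Worked example (illustrative): for `*("abc" + 1)' the test above
           succeeds and the QImode constant 98 ('b') is returned directly,
           with no memory reference emitted.  */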
7295 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
7296 op0 = memory_address (mode, op0);
7297 temp = gen_rtx_MEM (mode, op0);
7298 set_mem_attributes (temp, exp, 0);
7300 /* If we are writing to this object and its type is a record with
7301 readonly fields, we must mark it as readonly so it will
7302 conflict with readonly references to those fields. */
7303 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
7304 RTX_UNCHANGING_P (temp) = 1;
7310 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
7314 tree array = TREE_OPERAND (exp, 0);
7315 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
7316 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
7317 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
7320 /* Optimize the special-case of a zero lower bound.
7322 We convert the low_bound to sizetype to avoid some problems
7323 with constant folding. (E.g. suppose the lower bound is 1,
7324 and its mode is QI. Without the conversion, (ARRAY
7325 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
7326 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
7328 if (! integer_zerop (low_bound))
7329 index = size_diffop (index, convert (sizetype, low_bound));
7331 /* Fold an expression like: "foo"[2].
7332 This is not done in fold so it won't happen inside &.
7333 Don't fold if this is for wide characters since it's too
7334 difficult to do correctly and this is a very rare case. */
7336 if (modifier != EXPAND_CONST_ADDRESS
7337 && modifier != EXPAND_INITIALIZER
7338 && modifier != EXPAND_MEMORY
7339 && TREE_CODE (array) == STRING_CST
7340 && TREE_CODE (index) == INTEGER_CST
7341 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
7342 && GET_MODE_CLASS (mode) == MODE_INT
7343 && GET_MODE_SIZE (mode) == 1)
7344 return gen_int_mode (TREE_STRING_POINTER (array)
7345 [TREE_INT_CST_LOW (index)], mode);
7347 /* If this is a constant index into a constant array,
7348 just get the value from the array. Handle both the cases when
7349 we have an explicit constructor and when our operand is a variable
7350 that was declared const. */
7352 if (modifier != EXPAND_CONST_ADDRESS
7353 && modifier != EXPAND_INITIALIZER
7354 && modifier != EXPAND_MEMORY
7355 && TREE_CODE (array) == CONSTRUCTOR
7356 && ! TREE_SIDE_EFFECTS (array)
7357 && TREE_CODE (index) == INTEGER_CST
7358 && 0 > compare_tree_int (index,
7359 list_length (CONSTRUCTOR_ELTS
7360 (TREE_OPERAND (exp, 0)))))
7364 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7365 i = TREE_INT_CST_LOW (index);
7366 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
7370 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
7374 else if (optimize >= 1
7375 && modifier != EXPAND_CONST_ADDRESS
7376 && modifier != EXPAND_INITIALIZER
7377 && modifier != EXPAND_MEMORY
7378 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7379 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7380 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
7382 if (TREE_CODE (index) == INTEGER_CST)
7384 tree init = DECL_INITIAL (array);
7386 if (TREE_CODE (init) == CONSTRUCTOR)
7390 for (elem = CONSTRUCTOR_ELTS (init);
7392 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
7393 elem = TREE_CHAIN (elem))
7396 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
7397 return expand_expr (fold (TREE_VALUE (elem)), target,
7400 else if (TREE_CODE (init) == STRING_CST
7401 && 0 > compare_tree_int (index,
7402 TREE_STRING_LENGTH (init)))
7404 tree type = TREE_TYPE (TREE_TYPE (init));
7405 enum machine_mode mode = TYPE_MODE (type);
7407 if (GET_MODE_CLASS (mode) == MODE_INT
7408 && GET_MODE_SIZE (mode) == 1)
7409 return gen_int_mode (TREE_STRING_POINTER (init)
7410 [TREE_INT_CST_LOW (index)], mode);
7415 goto normal_inner_ref;
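      /* Worked example (illustrative): given
           static const int t[3] = { 10, 20, 30 };
         the access `t[1]' takes the DECL_INITIAL path above and
         expands to the constant 20 instead of a load.  */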
7418 /* If the operand is a CONSTRUCTOR, we can just extract the
7419 appropriate field if it is present. */
7420 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
7424 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
7425 elt = TREE_CHAIN (elt))
7426 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
7427 /* We can normally use the value of the field in the
7428 CONSTRUCTOR. However, if this is a bitfield in
7429 an integral mode that we can fit in a HOST_WIDE_INT,
7430 we must mask only the number of bits in the bitfield,
7431 since this is done implicitly by the constructor. If
7432 the bitfield does not meet either of those conditions,
7433 we can't do this optimization. */
7434 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
7435 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
7437 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
7438 <= HOST_BITS_PER_WIDE_INT))))
7440 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
7441 && modifier == EXPAND_STACK_PARM)
7443 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
7444 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
7446 HOST_WIDE_INT bitsize
7447 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
7448 enum machine_mode imode
7449 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
7451 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
7453 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7454 op0 = expand_and (imode, op0, op1, target);
7459 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
7462 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7464 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7472 goto normal_inner_ref;
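      /* Worked example (illustrative): extracting a 3-bit bitfield from
         a CONSTRUCTOR value held in SImode: an unsigned field is masked
         with (1 << 3) - 1 == 7; a signed one is shifted left by 29 and
         arithmetically right by 29 so its sign bit is replicated.  */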
7475 case ARRAY_RANGE_REF:
7478 enum machine_mode mode1;
7479 HOST_WIDE_INT bitsize, bitpos;
7482 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7483 &mode1, &unsignedp, &volatilep);
7486 /* If we got back the original object, something is wrong. Perhaps
7487 we are evaluating an expression too early. In any event, don't
7488 infinitely recurse. */
        /* If TEM's type is a union of variable size, pass TARGET to the inner
           computation, since it will need a temporary and TARGET is known
           to be safe to use for one.  This occurs in unchecked conversion
           in Ada.  */
7498 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7499 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7501 && modifier != EXPAND_STACK_PARM
7502 ? target : NULL_RTX),
7504 (modifier == EXPAND_INITIALIZER
7505 || modifier == EXPAND_CONST_ADDRESS
7506 || modifier == EXPAND_STACK_PARM)
7507 ? modifier : EXPAND_NORMAL);
      /* If this is a constant, put it into a register if it is a
         legitimate constant and OFFSET is 0, and into memory if it isn't.  */
7511 if (CONSTANT_P (op0))
7513 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7514 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7516 op0 = force_reg (mode, op0);
7518 op0 = validize_mem (force_const_mem (mode, op0));
7523 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7526 /* If this object is in a register, put it into memory.
7527 This case can't occur in C, but can in Ada if we have
7528 unchecked conversion of an expression from a scalar type to
7529 an array or record type. */
7530 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7531 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
7533 /* If the operand is a SAVE_EXPR, we can deal with this by
7534 forcing the SAVE_EXPR into memory. */
7535 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7537 put_var_into_stack (TREE_OPERAND (exp, 0),
7539 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7544 = build_qualified_type (TREE_TYPE (tem),
7545 (TYPE_QUALS (TREE_TYPE (tem))
7546 | TYPE_QUAL_CONST));
7547 rtx memloc = assign_temp (nt, 1, 1, 1);
7549 emit_move_insn (memloc, op0);
7554 if (GET_CODE (op0) != MEM)
7557 #ifdef POINTERS_EXTEND_UNSIGNED
7558 if (GET_MODE (offset_rtx) != Pmode)
7559 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7561 if (GET_MODE (offset_rtx) != ptr_mode)
7562 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
          /* A constant address in OP0 can have VOIDmode; we must not try
             to call force_reg in that case, so avoid it.  */
7567 if (GET_CODE (op0) == MEM
7568 && GET_MODE (op0) == BLKmode
7569 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7571 && (bitpos % bitsize) == 0
7572 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7573 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7575 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7579 op0 = offset_address (op0, offset_rtx,
7580 highest_pow2_factor (offset));
7583 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7584 record its alignment as BIGGEST_ALIGNMENT. */
7585 if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
7586 && is_aligning_offset (offset, tem))
7587 set_mem_align (op0, BIGGEST_ALIGNMENT);
7589 /* Don't forget about volatility even if this is a bitfield. */
7590 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7592 if (op0 == orig_op0)
7593 op0 = copy_rtx (op0);
7595 MEM_VOLATILE_P (op0) = 1;
      /* The following code doesn't handle CONCAT.
         Assume only bitpos == 0 can be used for CONCAT, due to
         one-element arrays having the same mode as their element.  */
7601 if (GET_CODE (op0) == CONCAT)
7603 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
7608 /* In cases where an aligned union has an unaligned object
7609 as a field, we might be extracting a BLKmode value from
7610 an integer-mode (e.g., SImode) object. Handle this case
7611 by doing the extract into an object as wide as the field
7612 (which we know to be the width of a basic mode), then
7613 storing into memory, and changing the mode to BLKmode. */
7614 if (mode1 == VOIDmode
7615 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7616 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7617 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7618 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7619 && modifier != EXPAND_CONST_ADDRESS
7620 && modifier != EXPAND_INITIALIZER)
7621 /* If the field isn't aligned enough to fetch as a memref,
7622 fetch it as a bit field. */
7623 || (mode1 != BLKmode
7624 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7625 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))
7626 && SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0)))
7627 || (bitpos % BITS_PER_UNIT != 0)))
7628 /* If the type and the field are a constant size and the
7629 size of the type isn't the same size as the bitfield,
7630 we must use bitfield operations. */
7632 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7634 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7637 enum machine_mode ext_mode = mode;
7639 if (ext_mode == BLKmode
7640 && ! (target != 0 && GET_CODE (op0) == MEM
7641 && GET_CODE (target) == MEM
7642 && bitpos % BITS_PER_UNIT == 0))
7643 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7645 if (ext_mode == BLKmode)
7647 /* In this case, BITPOS must start at a byte boundary and
7648 TARGET, if specified, must be a MEM. */
7649 if (GET_CODE (op0) != MEM
7650 || (target != 0 && GET_CODE (target) != MEM)
7651 || bitpos % BITS_PER_UNIT != 0)
7654 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
7656 target = assign_temp (type, 0, 1, 1);
7658 emit_block_move (target, op0,
7659 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7661 (modifier == EXPAND_STACK_PARM
7662 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7667 op0 = validize_mem (op0);
7669 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7670 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7672 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7673 (modifier == EXPAND_STACK_PARM
7674 ? NULL_RTX : target),
7676 int_size_in_bytes (TREE_TYPE (tem)));
7678 /* If the result is a record type and BITSIZE is narrower than
7679 the mode of OP0, an integral mode, and this is a big endian
7680 machine, we must put the field into the high-order bits. */
7681 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7682 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7683 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7684 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7685 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7689 if (mode == BLKmode)
7691 rtx new = assign_temp (build_qualified_type
7692 ((*lang_hooks.types.type_for_mode)
7694 TYPE_QUAL_CONST), 0, 1, 1);
7696 emit_move_insn (new, op0);
7697 op0 = copy_rtx (new);
7698 PUT_MODE (op0, BLKmode);
7699 set_mem_attributes (op0, exp, 1);
      /* If the result is BLKmode, use that to access the object
         now as well.  */
7707 if (mode == BLKmode)
7710 /* Get a reference to just this component. */
7711 if (modifier == EXPAND_CONST_ADDRESS
7712 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7713 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7715 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7717 if (op0 == orig_op0)
7718 op0 = copy_rtx (op0);
7720 set_mem_attributes (op0, exp, 0);
7721 if (GET_CODE (XEXP (op0, 0)) == REG)
7722 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7724 MEM_VOLATILE_P (op0) |= volatilep;
7725 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7726 || modifier == EXPAND_CONST_ADDRESS
7727 || modifier == EXPAND_INITIALIZER)
7729 else if (target == 0)
7730 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7732 convert_move (target, op0, unsignedp);
7738 rtx insn, before = get_last_insn (), vtbl_ref;
7740 /* Evaluate the interior expression. */
7741 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7744 /* Get or create an instruction off which to hang a note. */
7745 if (REG_P (subtarget))
7748 insn = get_last_insn ();
7751 if (! INSN_P (insn))
7752 insn = prev_nonnote_insn (insn);
7756 target = gen_reg_rtx (GET_MODE (subtarget));
7757 insn = emit_move_insn (target, subtarget);
7760 /* Collect the data for the note. */
7761 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7762 vtbl_ref = plus_constant (vtbl_ref,
7763 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7764 /* Discard the initial CONST that was added. */
7765 vtbl_ref = XEXP (vtbl_ref, 0);
7768 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7773 /* Intended for a reference to a buffer of a file-object in Pascal.
7774 But it's not certain that a special tree code will really be
7775 necessary for these. INDIRECT_REF might work for them. */
7781 /* Pascal set IN expression.
7784 rlo = set_low - (set_low%bits_per_word);
7785 the_word = set [ (index - rlo)/bits_per_word ];
7786 bit_index = index % bits_per_word;
7787 bitmask = 1 << bit_index;
7788 return !!(the_word & bitmask); */
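        /* Illustrative sketch (standalone, not from the sources): the
           same membership test on a host-side bitset, with BITS_PER_UNIT
           assumed to be 8.  */
#if 0
static int
in_set_example (const unsigned char *set, unsigned long set_low,
                unsigned long index)
{
  unsigned long rlo = set_low - set_low % 8;
  unsigned char the_word = set[(index - rlo) / 8];
  return (the_word >> (index % 8)) & 1;
}
#endif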
7790 tree set = TREE_OPERAND (exp, 0);
7791 tree index = TREE_OPERAND (exp, 1);
7792 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7793 tree set_type = TREE_TYPE (set);
7794 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7795 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7796 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7797 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7798 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7799 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7800 rtx setaddr = XEXP (setval, 0);
7801 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7803 rtx diff, quo, rem, addr, bit, result;
7805 /* If domain is empty, answer is no. Likewise if index is constant
7806 and out of bounds. */
7807 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7808 && TREE_CODE (set_low_bound) == INTEGER_CST
7809 && tree_int_cst_lt (set_high_bound, set_low_bound))
7810 || (TREE_CODE (index) == INTEGER_CST
7811 && TREE_CODE (set_low_bound) == INTEGER_CST
7812 && tree_int_cst_lt (index, set_low_bound))
7813 || (TREE_CODE (set_high_bound) == INTEGER_CST
7814 && TREE_CODE (index) == INTEGER_CST
7815 && tree_int_cst_lt (set_high_bound, index))))
7819 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7821 /* If we get here, we have to generate the code for both cases
7822 (in range and out of range). */
7824 op0 = gen_label_rtx ();
7825 op1 = gen_label_rtx ();
7827 if (! (GET_CODE (index_val) == CONST_INT
7828 && GET_CODE (lo_r) == CONST_INT))
7829 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7830 GET_MODE (index_val), iunsignedp, op1);
7832 if (! (GET_CODE (index_val) == CONST_INT
7833 && GET_CODE (hi_r) == CONST_INT))
7834 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7835 GET_MODE (index_val), iunsignedp, op1);
        /* Calculate the element number of bit zero in the first word
           of the set.  */
7839 if (GET_CODE (lo_r) == CONST_INT)
7840 rlow = GEN_INT (INTVAL (lo_r)
7841 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7843 rlow = expand_binop (index_mode, and_optab, lo_r,
7844 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7845 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7847 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7848 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7850 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7851 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7852 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7853 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7855 addr = memory_address (byte_mode,
7856 expand_binop (index_mode, add_optab, diff,
7857 setaddr, NULL_RTX, iunsignedp,
7860 /* Extract the bit we want to examine. */
7861 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7862 gen_rtx_MEM (byte_mode, addr),
7863 make_tree (TREE_TYPE (index), rem),
7865 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7866 GET_MODE (target) == byte_mode ? target : 0,
7867 1, OPTAB_LIB_WIDEN);
7869 if (result != target)
7870 convert_move (target, result, 1);
7872 /* Output the code to handle the out-of-range case. */
7875 emit_move_insn (target, const0_rtx);
7880 case WITH_CLEANUP_EXPR:
7881 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7883 WITH_CLEANUP_EXPR_RTL (exp)
7884 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7885 expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
7886 CLEANUP_EH_ONLY (exp));
7888 /* That's it for this cleanup. */
7889 TREE_OPERAND (exp, 1) = 0;
7891 return WITH_CLEANUP_EXPR_RTL (exp);
7893 case CLEANUP_POINT_EXPR:
7895 /* Start a new binding layer that will keep track of all cleanup
7896 actions to be performed. */
7897 expand_start_bindings (2);
7899 target_temp_slot_level = temp_slot_level;
7901 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7902 /* If we're going to use this value, load it up now. */
7904 op0 = force_not_mem (op0);
7905 preserve_temp_slots (op0);
7906 expand_end_bindings (NULL_TREE, 0, 0);
7911 /* Check for a built-in function. */
7912 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7913 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7915 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7917 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7918 == BUILT_IN_FRONTEND)
7919 return (*lang_hooks.expand_expr) (exp, original_target,
7922 return expand_builtin (exp, target, subtarget, tmode, ignore);
7925 return expand_call (exp, target, ignore);
7927 case NON_LVALUE_EXPR:
7930 case REFERENCE_EXPR:
7931 if (TREE_OPERAND (exp, 0) == error_mark_node)
7934 if (TREE_CODE (type) == UNION_TYPE)
7936 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
          /* If both input and output are BLKmode, this conversion isn't doing
             anything except possibly changing memory attributes.  */
7940 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7942 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7945 result = copy_rtx (result);
7946 set_mem_attributes (result, exp, 0);
7951 target = assign_temp (type, 0, 1, 1);
7953 if (GET_CODE (target) == MEM)
7954 /* Store data into beginning of memory target. */
7955 store_expr (TREE_OPERAND (exp, 0),
7956 adjust_address (target, TYPE_MODE (valtype), 0),
7957 modifier == EXPAND_STACK_PARM ? 2 : 0);
7959 else if (GET_CODE (target) == REG)
7960 /* Store this field into a union of the proper type. */
7961 store_field (target,
7962 MIN ((int_size_in_bytes (TREE_TYPE
7963 (TREE_OPERAND (exp, 0)))
7965 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7966 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7967 VOIDmode, 0, type, 0);
7971 /* Return the entire union. */
7975 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7977 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7980 /* If the signedness of the conversion differs and OP0 is
7981 a promoted SUBREG, clear that indication since we now
7982 have to do the proper extension. */
7983 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7984 && GET_CODE (op0) == SUBREG)
7985 SUBREG_PROMOTED_VAR_P (op0) = 0;
7990 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7991 if (GET_MODE (op0) == mode)
7994 /* If OP0 is a constant, just convert it into the proper mode. */
7995 if (CONSTANT_P (op0))
7997 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7998 enum machine_mode inner_mode = TYPE_MODE (inner_type);
8000 if (modifier == EXPAND_INITIALIZER)
8001 return simplify_gen_subreg (mode, op0, inner_mode,
8002 subreg_lowpart_offset (mode,
8005 return convert_modes (mode, inner_mode, op0,
8006 TREE_UNSIGNED (inner_type));
8009 if (modifier == EXPAND_INITIALIZER)
8010 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
8014 convert_to_mode (mode, op0,
8015 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8017 convert_move (target, op0,
8018 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8021 case VIEW_CONVERT_EXPR:
8022 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
8024 /* If the input and output modes are both the same, we are done.
8025 Otherwise, if neither mode is BLKmode and both are integral and within
8026 a word, we can use gen_lowpart. If neither is true, make sure the
8027 operand is in memory and convert the MEM to the new mode. */
8028 if (TYPE_MODE (type) == GET_MODE (op0))
8030 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
8031 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
8032 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
8033 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
8034 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
8035 op0 = gen_lowpart (TYPE_MODE (type), op0);
8036 else if (GET_CODE (op0) != MEM)
          /* If the operand is not a MEM, force it into memory.  Since we
             are going to be changing the mode of the MEM, don't call
8040 force_const_mem for constants because we don't allow pool
8041 constants to change mode. */
8042 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8044 if (TREE_ADDRESSABLE (exp))
8047 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
8049 = assign_stack_temp_for_type
8050 (TYPE_MODE (inner_type),
8051 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
8053 emit_move_insn (target, op0);
8057 /* At this point, OP0 is in the correct mode. If the output type is such
8058 that the operand is known to be aligned, indicate that it is.
         Otherwise, we need only be concerned about alignment for non-BLKmode
         results.  */
8061 if (GET_CODE (op0) == MEM)
8063 op0 = copy_rtx (op0);
8065 if (TYPE_ALIGN_OK (type))
8066 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
8067 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
8068 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
8070 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8071 HOST_WIDE_INT temp_size
8072 = MAX (int_size_in_bytes (inner_type),
8073 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
8074 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
8075 temp_size, 0, type);
8076 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
8078 if (TREE_ADDRESSABLE (exp))
8081 if (GET_MODE (op0) == BLKmode)
8082 emit_block_move (new_with_op0_mode, op0,
8083 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
8084 (modifier == EXPAND_STACK_PARM
8085 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
8087 emit_move_insn (new_with_op0_mode, op0);
8092 op0 = adjust_address (op0, TYPE_MODE (type), 0);
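      /* Worked example (illustrative): a VIEW_CONVERT_EXPR from `float'
         to `int' (both 4 bytes) either re-casts op0 with gen_lowpart or
         spills it to a stack temporary and re-reads it in SImode: a
         bit-preserving reinterpretation, not a numeric conversion.  */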
8098 this_optab = ! unsignedp && flag_trapv
8099 && (GET_MODE_CLASS (mode) == MODE_INT)
8100 ? addv_optab : add_optab;
8102 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
8103 something else, make sure we add the register to the constant and
8104 then to the other thing. This case can occur during strength
8105 reduction and doing it this way will produce better code if the
8106 frame pointer or argument pointer is eliminated.
8108 fold-const.c will ensure that the constant is always in the inner
8109 PLUS_EXPR, so the only case we need to do anything about is if
8110 sp, ap, or fp is our second argument, in which case we must swap
8111 the innermost first argument and our second argument. */
8113 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
8114 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
8115 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
8116 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
8117 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
8118 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
8120 tree t = TREE_OPERAND (exp, 1);
8122 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
8123 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
8126 /* If the result is to be ptr_mode and we are adding an integer to
8127 something, we might be forming a constant. So try to use
8128 plus_constant. If it produces a sum and we can't accept it,
8129 use force_operand. This allows P = &ARR[const] to generate
         efficient code on machines where a SYMBOL_REF is not a valid
         address.
8133 If this is an EXPAND_SUM call, always return the sum. */
8134 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8135 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8137 if (modifier == EXPAND_STACK_PARM)
8139 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
8140 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8141 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
8145 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
8147 /* Use immed_double_const to ensure that the constant is
8148 truncated according to the mode of OP1, then sign extended
8149 to a HOST_WIDE_INT. Using the constant directly can result
8150 in non-canonical RTL in a 64x32 cross compile. */
8152 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
8154 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
8155 op1 = plus_constant (op1, INTVAL (constant_part));
8156 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8157 op1 = force_operand (op1, target);
8161 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8162 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
8163 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
8167 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8168 (modifier == EXPAND_INITIALIZER
8169 ? EXPAND_INITIALIZER : EXPAND_SUM));
8170 if (! CONSTANT_P (op0))
8172 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8173 VOIDmode, modifier);
8174 /* Don't go to both_summands if modifier
8175 says it's not right to return a PLUS. */
8176 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8180 /* Use immed_double_const to ensure that the constant is
8181 truncated according to the mode of OP1, then sign extended
8182 to a HOST_WIDE_INT. Using the constant directly can result
8183 in non-canonical RTL in a 64x32 cross compile. */
8185 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
8187 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
8188 op0 = plus_constant (op0, INTVAL (constant_part));
8189 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8190 op0 = force_operand (op0, target);
8195 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8198 /* No sense saving up arithmetic to be done
8199 if it's all in the wrong mode to form part of an address.
         And force_operand won't know whether to sign-extend or
         zero-extend.  */
8202 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8203 || mode != ptr_mode)
8205 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8206 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8207 if (op0 == const0_rtx)
8209 if (op1 == const0_rtx)
8214 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
8215 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
      /* We come here from MINUS_EXPR when the second operand is a
         constant.  */
8220 /* Make sure any term that's a sum with a constant comes last. */
8221 if (GET_CODE (op0) == PLUS
8222 && CONSTANT_P (XEXP (op0, 1)))
8228 /* If adding to a sum including a constant,
8229 associate it to put the constant outside. */
8230 if (GET_CODE (op1) == PLUS
8231 && CONSTANT_P (XEXP (op1, 1)))
8233 rtx constant_term = const0_rtx;
8235 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
8238 /* Ensure that MULT comes first if there is one. */
8239 else if (GET_CODE (op0) == MULT)
8240 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
8242 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
8244 /* Let's also eliminate constants from op0 if possible. */
8245 op0 = eliminate_constant_term (op0, &constant_term);
8247 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
8248 their sum should be a constant. Form it into OP1, since the
8249 result we want will then be OP0 + OP1. */
8251 temp = simplify_binary_operation (PLUS, mode, constant_term,
8256 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
8259 /* Put a constant term last and put a multiplication first. */
8260 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
8261 temp = op1, op1 = op0, op0 = temp;
8263 temp = simplify_binary_operation (PLUS, mode, op0, op1);
8264 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
8267 /* For initializers, we are allowed to return a MINUS of two
         symbolic constants.  Here we handle all cases when both operands
         are constant.  */
8270 /* Handle difference of two symbolic constants,
8271 for the sake of an initializer. */
8272 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8273 && really_constant_p (TREE_OPERAND (exp, 0))
8274 && really_constant_p (TREE_OPERAND (exp, 1)))
8276 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
8278 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
8281 /* If the last operand is a CONST_INT, use plus_constant of
8282 the negated constant. Else make the MINUS. */
8283 if (GET_CODE (op1) == CONST_INT)
8284 return plus_constant (op0, - INTVAL (op1));
8286 return gen_rtx_MINUS (mode, op0, op1);
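          /* Worked example (illustrative): a static initializer holding
             the difference of two addresses, e.g.
             `(char *) &table_end - (char *) &table_start', reaches this
             path; when OP1 folds to a CONST_INT the result is formed with
             plus_constant, otherwise a symbolic
             (minus (symbol_ref ...) (symbol_ref ...)) is kept for the
             assembler to resolve.  */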
8289 this_optab = ! unsignedp && flag_trapv
8290 && (GET_MODE_CLASS(mode) == MODE_INT)
8291 ? subv_optab : sub_optab;
8293 /* No sense saving up arithmetic to be done
8294 if it's all in the wrong mode to form part of an address.
8295 And force_operand won't know whether to sign-extend or
8297 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8298 || mode != ptr_mode)
8301 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8304 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
8305 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
8307 /* Convert A - const to A + (-const). */
8308 if (GET_CODE (op1) == CONST_INT)
8310 op1 = negate_rtx (mode, op1);
8317 /* If first operand is constant, swap them.
8318 Thus the following special case checks need only
8319 check the second operand. */
8320 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
8322 tree t1 = TREE_OPERAND (exp, 0);
8323 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
8324 TREE_OPERAND (exp, 1) = t1;
8327 /* Attempt to return something suitable for generating an
8328 indexed address, for machines that support that. */
8330 if (modifier == EXPAND_SUM && mode == ptr_mode
8331 && host_integerp (TREE_OPERAND (exp, 1), 0))
8333 tree exp1 = TREE_OPERAND (exp, 1);
8335 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8338 /* If we knew for certain that this is arithmetic for an array
8339 reference, and we knew the bounds of the array, then we could
8340 apply the distributive law across (PLUS X C) for constant C.
8341 Without such knowledge, we risk overflowing the computation
8342 when both X and C are large, but X+C isn't. */
8343 /* ??? Could perhaps special-case EXP being unsigned and C being
8344 positive. In that case we are certain that X+C is no smaller
8345 than X and so the transformed expression will overflow iff the
8346 original would have. */
8348 if (GET_CODE (op0) != REG)
8349 op0 = force_operand (op0, NULL_RTX);
8350 if (GET_CODE (op0) != REG)
8351 op0 = copy_to_mode_reg (mode, op0);
8353 return gen_rtx_MULT (mode, op0,
8354 gen_int_mode (tree_low_cst (exp1, 0),
8355 TYPE_MODE (TREE_TYPE (exp1))));
8358 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8361 if (modifier == EXPAND_STACK_PARM)
8364 /* Check for multiplying things that have been extended
8365 from a narrower type. If this machine supports multiplying
8366 in that narrower type with a result in the desired type,
8367 do it that way, and avoid the explicit type-conversion. */
8368 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
8369 && TREE_CODE (type) == INTEGER_TYPE
8370 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8371 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8372 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8373 && int_fits_type_p (TREE_OPERAND (exp, 1),
8374 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8375 /* Don't use a widening multiply if a shift will do. */
8376 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8377 > HOST_BITS_PER_WIDE_INT)
8378 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
8380 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8381 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8383 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
8384 /* If both operands are extended, they must either both
8385 be zero-extended or both be sign-extended. */
8386 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8388 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
8390 enum machine_mode innermode
8391 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
8392 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8393 ? smul_widen_optab : umul_widen_optab);
8394 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8395 ? umul_widen_optab : smul_widen_optab);
8396 if (mode == GET_MODE_WIDER_MODE (innermode))
8398 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
8400 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8401 NULL_RTX, VOIDmode, 0);
8402 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8403 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8406 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8407 NULL_RTX, VOIDmode, 0);
8410 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
8411 && innermode == word_mode)
8414 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8415 NULL_RTX, VOIDmode, 0);
8416 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8417 op1 = convert_modes (innermode, mode,
8418 expand_expr (TREE_OPERAND (exp, 1),
8419 NULL_RTX, VOIDmode, 0),
8422 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8423 NULL_RTX, VOIDmode, 0);
8424 temp = expand_binop (mode, other_optab, op0, op1, target,
8425 unsignedp, OPTAB_LIB_WIDEN);
8426 htem = expand_mult_highpart_adjust (innermode,
8427 gen_highpart (innermode, temp),
8429 gen_highpart (innermode, temp),
8431 emit_move_insn (gen_highpart (innermode, temp), htem);
8436 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8437 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8438 return expand_mult (mode, op0, op1, target, unsignedp);
8440 case TRUNC_DIV_EXPR:
8441 case FLOOR_DIV_EXPR:
8443 case ROUND_DIV_EXPR:
8444 case EXACT_DIV_EXPR:
8445 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8447 if (modifier == EXPAND_STACK_PARM)
8449 /* Possible optimization: compute the dividend with EXPAND_SUM
8450 then if the divisor is constant can optimize the case
8451 where some terms of the dividend have coeffs divisible by it. */
8452 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8453 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8454 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8457 /* Emit a/b as a*(1/b). Later we may manage CSE the reciprocal saving
8458 expensive divide. If not, combine will rebuild the original
8460 if (flag_unsafe_math_optimizations && optimize && !optimize_size
8461 && TREE_CODE (type) == REAL_TYPE
8462 && !real_onep (TREE_OPERAND (exp, 0)))
8463 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
8464 build (RDIV_EXPR, type,
8465 build_real (type, dconst1),
8466 TREE_OPERAND (exp, 1))),
8467 target, tmode, modifier);
8468 this_optab = sdiv_optab;
8471 case TRUNC_MOD_EXPR:
8472 case FLOOR_MOD_EXPR:
8474 case ROUND_MOD_EXPR:
8475 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8477 if (modifier == EXPAND_STACK_PARM)
8479 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8480 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8481 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8483 case FIX_ROUND_EXPR:
8484 case FIX_FLOOR_EXPR:
8486 abort (); /* Not used for C. */
8488 case FIX_TRUNC_EXPR:
8489 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8490 if (target == 0 || modifier == EXPAND_STACK_PARM)
8491 target = gen_reg_rtx (mode);
8492 expand_fix (target, op0, unsignedp);
8496 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8497 if (target == 0 || modifier == EXPAND_STACK_PARM)
8498 target = gen_reg_rtx (mode);
8499 /* expand_float can't figure out what to do if FROM has VOIDmode.
8500 So give it the correct mode. With -O, cse will optimize this. */
8501 if (GET_MODE (op0) == VOIDmode)
8502 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8504 expand_float (target, op0,
8505 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8509 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8510 if (modifier == EXPAND_STACK_PARM)
8512 temp = expand_unop (mode,
8513 ! unsignedp && flag_trapv
8514 && (GET_MODE_CLASS(mode) == MODE_INT)
8515 ? negv_optab : neg_optab, op0, target, 0);
8521 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8522 if (modifier == EXPAND_STACK_PARM)
8525 /* Handle complex values specially. */
8526 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
8527 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
8528 return expand_complex_abs (mode, op0, target, unsignedp);
8530 /* Unsigned abs is simply the operand. Testing here means we don't
8531 risk generating incorrect code below. */
8532 if (TREE_UNSIGNED (type))
8535 return expand_abs (mode, op0, target, unsignedp,
8536 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8540 target = original_target;
8542 || modifier == EXPAND_STACK_PARM
8543 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
8544 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
8545 || GET_MODE (target) != mode
8546 || (GET_CODE (target) == REG
8547 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8548 target = gen_reg_rtx (mode);
8549 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8550 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8552 /* First try to do it with a special MIN or MAX instruction.
8553 If that does not win, use a conditional jump to select the proper
8555 this_optab = (TREE_UNSIGNED (type)
8556 ? (code == MIN_EXPR ? umin_optab : umax_optab)
8557 : (code == MIN_EXPR ? smin_optab : smax_optab));
8559 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8564 /* At this point, a MEM target is no longer useful; we will get better
8567 if (GET_CODE (target) == MEM)
8568 target = gen_reg_rtx (mode);
8571 emit_move_insn (target, op0);
8573 op0 = gen_label_rtx ();
8575 /* If this mode is an integer too wide to compare properly,
8576 compare word by word. Rely on cse to optimize constant cases. */
8577 if (GET_MODE_CLASS (mode) == MODE_INT
8578 && ! can_compare_p (GE, mode, ccp_jump))
8580 if (code == MAX_EXPR)
8581 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8582 target, op1, NULL_RTX, op0);
8584 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8585 op1, target, NULL_RTX, op0);
8589 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
8590 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
8591 unsignedp, mode, NULL_RTX, NULL_RTX,
8594 emit_move_insn (target, op1);
8599 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8600 if (modifier == EXPAND_STACK_PARM)
8602 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8608 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8609 if (modifier == EXPAND_STACK_PARM)
8611 temp = expand_unop (mode, ffs_optab, op0, target, 1);
8617 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8618 temp = expand_unop (mode, clz_optab, op0, target, 1);
8624 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8625 temp = expand_unop (mode, ctz_optab, op0, target, 1);
8631 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8632 temp = expand_unop (mode, popcount_optab, op0, target, 1);
8638 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8639 temp = expand_unop (mode, parity_optab, op0, target, 1);
8644 /* ??? Can optimize bitwise operations with one arg constant.
8645 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8646 and (a bitwise1 b) bitwise2 b (etc)
8647 but that is probably not worth while. */
8649 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8650 boolean values when we want in all cases to compute both of them. In
8651 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8652 as actual zero-or-1 values and then bitwise anding. In cases where
8653 there cannot be any side effects, better code would be made by
8654 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8655 how to recognize those cases. */
8657 case TRUTH_AND_EXPR:
8659 this_optab = and_optab;
8664 this_optab = ior_optab;
8667 case TRUTH_XOR_EXPR:
8669 this_optab = xor_optab;
8676 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8678 if (modifier == EXPAND_STACK_PARM)
8680 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8681 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8684 /* Could determine the answer when only additive constants differ. Also,
8685 the addition of one can be handled by changing the condition. */
8692 case UNORDERED_EXPR:
8699 temp = do_store_flag (exp,
8700 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8701 tmode != VOIDmode ? tmode : mode, 0);
8705 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8706 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8708 && GET_CODE (original_target) == REG
8709 && (GET_MODE (original_target)
8710 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8712 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8715 /* If temp is constant, we can just compute the result. */
8716 if (GET_CODE (temp) == CONST_INT)
8718 if (INTVAL (temp) != 0)
8719 emit_move_insn (target, const1_rtx);
8721 emit_move_insn (target, const0_rtx);
8726 if (temp != original_target)
8728 enum machine_mode mode1 = GET_MODE (temp);
8729 if (mode1 == VOIDmode)
8730 mode1 = tmode != VOIDmode ? tmode : mode;
8732 temp = copy_to_mode_reg (mode1, temp);
8735 op1 = gen_label_rtx ();
8736 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8737 GET_MODE (temp), unsignedp, op1);
8738 emit_move_insn (temp, const1_rtx);
8743 /* If no set-flag instruction, must generate a conditional
8744 store into a temporary variable. Drop through
8745 and handle this like && and ||. */
8747 case TRUTH_ANDIF_EXPR:
8748 case TRUTH_ORIF_EXPR:
8751 || modifier == EXPAND_STACK_PARM
8752 || ! safe_from_p (target, exp, 1)
8753 /* Make sure we don't have a hard reg (such as function's return
8754 value) live across basic blocks, if not optimizing. */
8755 || (!optimize && GET_CODE (target) == REG
8756 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8757 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8760 emit_clr_insn (target);
8762 op1 = gen_label_rtx ();
8763 jumpifnot (exp, op1);
8766 emit_0_to_1_insn (target);
8769 return ignore ? const0_rtx : target;
8771 case TRUTH_NOT_EXPR:
8772 if (modifier == EXPAND_STACK_PARM)
8774 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8775 /* The parser is careful to generate TRUTH_NOT_EXPR
8776 only with operands that are always zero or one. */
8777 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8778 target, 1, OPTAB_LIB_WIDEN);
8784 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8786 return expand_expr (TREE_OPERAND (exp, 1),
8787 (ignore ? const0_rtx : target),
8788 VOIDmode, modifier);
8791 /* If we would have a "singleton" (see below) were it not for a
8792 conversion in each arm, bring that conversion back out. */
8793 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8794 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8795 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8796 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8798 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8799 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8801 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8802 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8803 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8804 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8805 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8806 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8807 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8808 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8809 return expand_expr (build1 (NOP_EXPR, type,
8810 build (COND_EXPR, TREE_TYPE (iftrue),
8811 TREE_OPERAND (exp, 0),
8813 target, tmode, modifier);
8817 /* Note that COND_EXPRs whose type is a structure or union
8818 are required to be constructed to contain assignments of
8819 a temporary variable, so that we can evaluate them here
8820 for side effect only. If type is void, we must do likewise. */
8822 /* If an arm of the branch requires a cleanup,
8823 only that cleanup is performed. */
8826 tree binary_op = 0, unary_op = 0;
8828 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8829 convert it to our mode, if necessary. */
8830 if (integer_onep (TREE_OPERAND (exp, 1))
8831 && integer_zerop (TREE_OPERAND (exp, 2))
8832 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8836 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8841 if (modifier == EXPAND_STACK_PARM)
8843 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8844 if (GET_MODE (op0) == mode)
8848 target = gen_reg_rtx (mode);
8849 convert_move (target, op0, unsignedp);
8853 /* Check for X ? A + B : A. If we have this, we can copy A to the
8854 output and conditionally add B. Similarly for unary operations.
8855 Don't do this if X has side-effects because those side effects
8856 might affect A or B and the "?" operation is a sequence point in
8857 ANSI. (operand_equal_p tests for side effects.) */
8859 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8860 && operand_equal_p (TREE_OPERAND (exp, 2),
8861 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8862 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8863 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8864 && operand_equal_p (TREE_OPERAND (exp, 1),
8865 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8866 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8867 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8868 && operand_equal_p (TREE_OPERAND (exp, 2),
8869 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8870 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8871 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8872 && operand_equal_p (TREE_OPERAND (exp, 1),
8873 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8874 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8876 /* If we are not to produce a result, we have no target. Otherwise,
8877 if a target was specified use it; it will not be used as an
8878 intermediate target unless it is safe. If no target, use a
8883 else if (modifier == EXPAND_STACK_PARM)
8884 temp = assign_temp (type, 0, 0, 1);
8885 else if (original_target
8886 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8887 || (singleton && GET_CODE (original_target) == REG
8888 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8889 && original_target == var_rtx (singleton)))
8890 && GET_MODE (original_target) == mode
8891 #ifdef HAVE_conditional_move
8892 && (! can_conditionally_move_p (mode)
8893 || GET_CODE (original_target) == REG
8894 || TREE_ADDRESSABLE (type))
8896 && (GET_CODE (original_target) != MEM
8897 || TREE_ADDRESSABLE (type)))
8898 temp = original_target;
8899 else if (TREE_ADDRESSABLE (type))
8902 temp = assign_temp (type, 0, 0, 1);
8904 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8905 do the test of X as a store-flag operation, do this as
8906 A + ((X != 0) << log C). Similarly for other simple binary
8907 operators. Only do for C == 1 if BRANCH_COST is low. */
8908 if (temp && singleton && binary_op
8909 && (TREE_CODE (binary_op) == PLUS_EXPR
8910 || TREE_CODE (binary_op) == MINUS_EXPR
8911 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8912 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8913 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8914 : integer_onep (TREE_OPERAND (binary_op, 1)))
8915 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8919 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8920 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8921 ? addv_optab : add_optab)
8922 : TREE_CODE (binary_op) == MINUS_EXPR
8923 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8924 ? subv_optab : sub_optab)
8925 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8928 /* If we had X ? A : A + 1, do this as A + (X == 0). */
8929 if (singleton == TREE_OPERAND (exp, 1))
8930 cond = invert_truthvalue (TREE_OPERAND (exp, 0));
8932 cond = TREE_OPERAND (exp, 0);
8934 result = do_store_flag (cond, (safe_from_p (temp, singleton, 1)
8936 mode, BRANCH_COST <= 1);
8938 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8939 result = expand_shift (LSHIFT_EXPR, mode, result,
8940 build_int_2 (tree_log2
8944 (safe_from_p (temp, singleton, 1)
8945 ? temp : NULL_RTX), 0);
8949 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8950 return expand_binop (mode, boptab, op1, result, temp,
8951 unsignedp, OPTAB_LIB_WIDEN);
8955 do_pending_stack_adjust ();
8957 op0 = gen_label_rtx ();
8959 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8963 /* If the target conflicts with the other operand of the
8964 binary op, we can't use it. Also, we can't use the target
8965 if it is a hard register, because evaluating the condition
8966 might clobber it. */
8968 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8969 || (GET_CODE (temp) == REG
8970 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8971 temp = gen_reg_rtx (mode);
8972 store_expr (singleton, temp,
8973 modifier == EXPAND_STACK_PARM ? 2 : 0);
8976 expand_expr (singleton,
8977 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8978 if (singleton == TREE_OPERAND (exp, 1))
8979 jumpif (TREE_OPERAND (exp, 0), op0);
8981 jumpifnot (TREE_OPERAND (exp, 0), op0);
8983 start_cleanup_deferral ();
8984 if (binary_op && temp == 0)
8985 /* Just touch the other operand. */
8986 expand_expr (TREE_OPERAND (binary_op, 1),
8987 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8989 store_expr (build (TREE_CODE (binary_op), type,
8990 make_tree (type, temp),
8991 TREE_OPERAND (binary_op, 1)),
8992 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8994 store_expr (build1 (TREE_CODE (unary_op), type,
8995 make_tree (type, temp)),
8996 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8999 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
9000 comparison operator. If we have one of these cases, set the
9001 output to A, branch on A (cse will merge these two references),
9002 then set the output to FOO. */
9004 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
9005 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
9006 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
9007 TREE_OPERAND (exp, 1), 0)
9008 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
9009 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
9010 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
9012 if (GET_CODE (temp) == REG
9013 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
9014 temp = gen_reg_rtx (mode);
9015 store_expr (TREE_OPERAND (exp, 1), temp,
9016 modifier == EXPAND_STACK_PARM ? 2 : 0);
9017 jumpif (TREE_OPERAND (exp, 0), op0);
9019 start_cleanup_deferral ();
9020 store_expr (TREE_OPERAND (exp, 2), temp,
9021 modifier == EXPAND_STACK_PARM ? 2 : 0);
9025 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
9026 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
9027 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
9028 TREE_OPERAND (exp, 2), 0)
9029 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
9030 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
9031 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
9033 if (GET_CODE (temp) == REG
9034 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
9035 temp = gen_reg_rtx (mode);
9036 store_expr (TREE_OPERAND (exp, 2), temp,
9037 modifier == EXPAND_STACK_PARM ? 2 : 0);
9038 jumpifnot (TREE_OPERAND (exp, 0), op0);
9040 start_cleanup_deferral ();
9041 store_expr (TREE_OPERAND (exp, 1), temp,
9042 modifier == EXPAND_STACK_PARM ? 2 : 0);
9047 op1 = gen_label_rtx ();
9048 jumpifnot (TREE_OPERAND (exp, 0), op0);
9050 start_cleanup_deferral ();
9052 /* One branch of the cond can be void, if it never returns. For
9053 example A ? throw : E */
9055 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
9056 store_expr (TREE_OPERAND (exp, 1), temp,
9057 modifier == EXPAND_STACK_PARM ? 2 : 0);
9059 expand_expr (TREE_OPERAND (exp, 1),
9060 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
9061 end_cleanup_deferral ();
9063 emit_jump_insn (gen_jump (op1));
9066 start_cleanup_deferral ();
9068 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
9069 store_expr (TREE_OPERAND (exp, 2), temp,
9070 modifier == EXPAND_STACK_PARM ? 2 : 0);
9072 expand_expr (TREE_OPERAND (exp, 2),
9073 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
9076 end_cleanup_deferral ();
9087 /* Something needs to be initialized, but we didn't know
9088 where that thing was when building the tree. For example,
9089 it could be the return value of a function, or a parameter
9090 to a function which lays down in the stack, or a temporary
9091 variable which must be passed by reference.
9093 We guarantee that the expression will either be constructed
9094 or copied into our original target. */
9096 tree slot = TREE_OPERAND (exp, 0);
9097 tree cleanups = NULL_TREE;
9100 if (TREE_CODE (slot) != VAR_DECL)
9104 target = original_target;
9106 /* Set this here so that if we get a target that refers to a
9107 register variable that's already been used, put_reg_into_stack
9108 knows that it should fix up those uses. */
9109 TREE_USED (slot) = 1;
9113 if (DECL_RTL_SET_P (slot))
9115 target = DECL_RTL (slot);
9116 /* If we have already expanded the slot, so don't do
9118 if (TREE_OPERAND (exp, 1) == NULL_TREE)
9123 target = assign_temp (type, 2, 0, 1);
9124 /* All temp slots at this level must not conflict. */
9125 preserve_temp_slots (target);
9126 SET_DECL_RTL (slot, target);
9127 if (TREE_ADDRESSABLE (slot))
9128 put_var_into_stack (slot, /*rescan=*/false);
9130 /* Since SLOT is not known to the called function
9131 to belong to its stack frame, we must build an explicit
9132 cleanup. This case occurs when we must build up a reference
9133 to pass the reference as an argument. In this case,
9134 it is very likely that such a reference need not be
9137 if (TREE_OPERAND (exp, 2) == 0)
9138 TREE_OPERAND (exp, 2)
9139 = (*lang_hooks.maybe_build_cleanup) (slot);
9140 cleanups = TREE_OPERAND (exp, 2);
9145 /* This case does occur, when expanding a parameter which
9146 needs to be constructed on the stack. The target
9147 is the actual stack address that we want to initialize.
9148 The function we call will perform the cleanup in this case. */
9150 /* If we have already assigned it space, use that space,
9151 not target that we were passed in, as our target
9152 parameter is only a hint. */
9153 if (DECL_RTL_SET_P (slot))
9155 target = DECL_RTL (slot);
9156 /* If we have already expanded the slot, so don't do
9158 if (TREE_OPERAND (exp, 1) == NULL_TREE)
9163 SET_DECL_RTL (slot, target);
9164 /* If we must have an addressable slot, then make sure that
9165 the RTL that we just stored in slot is OK. */
9166 if (TREE_ADDRESSABLE (slot))
9167 put_var_into_stack (slot, /*rescan=*/true);
9171 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
9172 /* Mark it as expanded. */
9173 TREE_OPERAND (exp, 1) = NULL_TREE;
9175 store_expr (exp1, target, modifier == EXPAND_STACK_PARM ? 2 : 0);
9177 expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));
9184 tree lhs = TREE_OPERAND (exp, 0);
9185 tree rhs = TREE_OPERAND (exp, 1);
9187 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
9193 /* If lhs is complex, expand calls in rhs before computing it.
9194 That's so we don't compute a pointer and save it over a
9195 call. If lhs is simple, compute it first so we can give it
9196 as a target if the rhs is just a call. This avoids an
9197 extra temp and copy and that prevents a partial-subsumption
9198 which makes bad code. Actually we could treat
9199 component_ref's of vars like vars. */
9201 tree lhs = TREE_OPERAND (exp, 0);
9202 tree rhs = TREE_OPERAND (exp, 1);
9206 /* Check for |= or &= of a bitfield of size one into another bitfield
9207 of size 1. In this case, (unless we need the result of the
9208 assignment) we can do this more efficiently with a
9209 test followed by an assignment, if necessary.
9211 ??? At this point, we can't get a BIT_FIELD_REF here. But if
9212 things change so we do, this code should be enhanced to
9215 && TREE_CODE (lhs) == COMPONENT_REF
9216 && (TREE_CODE (rhs) == BIT_IOR_EXPR
9217 || TREE_CODE (rhs) == BIT_AND_EXPR)
9218 && TREE_OPERAND (rhs, 0) == lhs
9219 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
9220 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
9221 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
9223 rtx label = gen_label_rtx ();
9225 do_jump (TREE_OPERAND (rhs, 1),
9226 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
9227 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
9228 expand_assignment (lhs, convert (TREE_TYPE (rhs),
9229 (TREE_CODE (rhs) == BIT_IOR_EXPR
9231 : integer_zero_node)),
9233 do_pending_stack_adjust ();
9238 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
9244 if (!TREE_OPERAND (exp, 0))
9245 expand_null_return ();
9247 expand_return (TREE_OPERAND (exp, 0));
9250 case PREINCREMENT_EXPR:
9251 case PREDECREMENT_EXPR:
9252 return expand_increment (exp, 0, ignore);
9254 case POSTINCREMENT_EXPR:
9255 case POSTDECREMENT_EXPR:
9256 /* Faster to treat as pre-increment if result is not used. */
9257 return expand_increment (exp, ! ignore, ignore);
9260 if (modifier == EXPAND_STACK_PARM)
9262 /* Are we taking the address of a nested function? */
9263 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
9264 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
9265 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
9266 && ! TREE_STATIC (exp))
9268 op0 = trampoline_address (TREE_OPERAND (exp, 0));
9269 op0 = force_operand (op0, target);
9271 /* If we are taking the address of something erroneous, just
9273 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
9275 /* If we are taking the address of a constant and are at the
9276 top level, we have to use output_constant_def since we can't
9277 call force_const_mem at top level. */
9279 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
9280 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
9282 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
9285 /* We make sure to pass const0_rtx down if we came in with
9286 ignore set, to avoid doing the cleanups twice for something. */
9287 op0 = expand_expr (TREE_OPERAND (exp, 0),
9288 ignore ? const0_rtx : NULL_RTX, VOIDmode,
9289 (modifier == EXPAND_INITIALIZER
9290 ? modifier : EXPAND_CONST_ADDRESS));
9292 /* If we are going to ignore the result, OP0 will have been set
9293 to const0_rtx, so just return it. Don't get confused and
9294 think we are taking the address of the constant. */
9298 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
9299 clever and returns a REG when given a MEM. */
9300 op0 = protect_from_queue (op0, 1);
9302 /* We would like the object in memory. If it is a constant, we can
9303 have it be statically allocated into memory. For a non-constant,
9304 we need to allocate some memory and store the value into it. */
9306 if (CONSTANT_P (op0))
9307 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
9309 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
9310 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
9311 || GET_CODE (op0) == PARALLEL || GET_CODE (op0) == LO_SUM)
9313 /* If the operand is a SAVE_EXPR, we can deal with this by
9314 forcing the SAVE_EXPR into memory. */
9315 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
9317 put_var_into_stack (TREE_OPERAND (exp, 0),
9319 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
9323 /* If this object is in a register, it can't be BLKmode. */
9324 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9325 rtx memloc = assign_temp (inner_type, 1, 1, 1);
9327 if (GET_CODE (op0) == PARALLEL)
9328 /* Handle calls that pass values in multiple
9329 non-contiguous locations. The Irix 6 ABI has examples
9331 emit_group_store (memloc, op0,
9332 int_size_in_bytes (inner_type));
9334 emit_move_insn (memloc, op0);
9340 if (GET_CODE (op0) != MEM)
9343 mark_temp_addr_taken (op0);
9344 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
9346 op0 = XEXP (op0, 0);
9347 #ifdef POINTERS_EXTEND_UNSIGNED
9348 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
9349 && mode == ptr_mode)
9350 op0 = convert_memory_address (ptr_mode, op0);
9355 /* If OP0 is not aligned as least as much as the type requires, we
9356 need to make a temporary, copy OP0 to it, and take the address of
9357 the temporary. We want to use the alignment of the type, not of
9358 the operand. Note that this is incorrect for FUNCTION_TYPE, but
9359 the test for BLKmode means that can't happen. The test for
9360 BLKmode is because we never make mis-aligned MEMs with
9363 We don't need to do this at all if the machine doesn't have
9364 strict alignment. */
9365 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
9366 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
9368 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
9370 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9373 if (TYPE_ALIGN_OK (inner_type))
9376 if (TREE_ADDRESSABLE (inner_type))
9378 /* We can't make a bitwise copy of this object, so fail. */
9379 error ("cannot take the address of an unaligned member");
9383 new = assign_stack_temp_for_type
9384 (TYPE_MODE (inner_type),
9385 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
9386 : int_size_in_bytes (inner_type),
9387 1, build_qualified_type (inner_type,
9388 (TYPE_QUALS (inner_type)
9389 | TYPE_QUAL_CONST)));
9391 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
9392 (modifier == EXPAND_STACK_PARM
9393 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
9398 op0 = force_operand (XEXP (op0, 0), target);
9402 && GET_CODE (op0) != REG
9403 && modifier != EXPAND_CONST_ADDRESS
9404 && modifier != EXPAND_INITIALIZER
9405 && modifier != EXPAND_SUM)
9406 op0 = force_reg (Pmode, op0);
9408 if (GET_CODE (op0) == REG
9409 && ! REG_USERVAR_P (op0))
9410 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
9412 #ifdef POINTERS_EXTEND_UNSIGNED
9413 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
9414 && mode == ptr_mode)
9415 op0 = convert_memory_address (ptr_mode, op0);
9420 case ENTRY_VALUE_EXPR:
9423 /* COMPLEX type for Extended Pascal & Fortran */
9426 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9429 /* Get the rtx code of the operands. */
9430 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9431 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
9434 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
9438 /* Move the real (op0) and imaginary (op1) parts to their location. */
9439 emit_move_insn (gen_realpart (mode, target), op0);
9440 emit_move_insn (gen_imagpart (mode, target), op1);
9442 insns = get_insns ();
9445 /* Complex construction should appear as a single unit. */
9446 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
9447 each with a separate pseudo as destination.
9448 It's not correct for flow to treat them as a unit. */
9449 if (GET_CODE (target) != CONCAT)
9450 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
9458 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9459 return gen_realpart (mode, op0);
9462 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9463 return gen_imagpart (mode, op0);
9467 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9471 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9474 target = gen_reg_rtx (mode);
9478 /* Store the realpart and the negated imagpart to target. */
9479 emit_move_insn (gen_realpart (partmode, target),
9480 gen_realpart (partmode, op0));
9482 imag_t = gen_imagpart (partmode, target);
9483 temp = expand_unop (partmode,
9484 ! unsignedp && flag_trapv
9485 && (GET_MODE_CLASS(partmode) == MODE_INT)
9486 ? negv_optab : neg_optab,
9487 gen_imagpart (partmode, op0), imag_t, 0);
9489 emit_move_insn (imag_t, temp);
9491 insns = get_insns ();
9494 /* Conjugate should appear as a single unit
9495 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
9496 each with a separate pseudo as destination.
9497 It's not correct for flow to treat them as a unit. */
9498 if (GET_CODE (target) != CONCAT)
9499 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
9506 case TRY_CATCH_EXPR:
9508 tree handler = TREE_OPERAND (exp, 1);
9510 expand_eh_region_start ();
9512 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9514 expand_eh_region_end_cleanup (handler);
9519 case TRY_FINALLY_EXPR:
9521 tree try_block = TREE_OPERAND (exp, 0);
9522 tree finally_block = TREE_OPERAND (exp, 1);
9524 if (!optimize || unsafe_for_reeval (finally_block) > 1)
9526 /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
9527 is not sufficient, so we cannot expand the block twice.
9528 So we play games with GOTO_SUBROUTINE_EXPR to let us
9529 expand the thing only once. */
9530 /* When not optimizing, we go ahead with this form since
9531 (1) user breakpoints operate more predictably without
9532 code duplication, and
9533 (2) we're not running any of the global optimizers
9534 that would explode in time/space with the highly
9535 connected CFG created by the indirect branching. */
9537 rtx finally_label = gen_label_rtx ();
9538 rtx done_label = gen_label_rtx ();
9539 rtx return_link = gen_reg_rtx (Pmode);
9540 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
9541 (tree) finally_label, (tree) return_link);
9542 TREE_SIDE_EFFECTS (cleanup) = 1;
9544 /* Start a new binding layer that will keep track of all cleanup
9545 actions to be performed. */
9546 expand_start_bindings (2);
9547 target_temp_slot_level = temp_slot_level;
9549 expand_decl_cleanup (NULL_TREE, cleanup);
9550 op0 = expand_expr (try_block, target, tmode, modifier);
9552 preserve_temp_slots (op0);
9553 expand_end_bindings (NULL_TREE, 0, 0);
9554 emit_jump (done_label);
9555 emit_label (finally_label);
9556 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
9557 emit_indirect_jump (return_link);
9558 emit_label (done_label);
9562 expand_start_bindings (2);
9563 target_temp_slot_level = temp_slot_level;
9565 expand_decl_cleanup (NULL_TREE, finally_block);
9566 op0 = expand_expr (try_block, target, tmode, modifier);
9568 preserve_temp_slots (op0);
9569 expand_end_bindings (NULL_TREE, 0, 0);
9575 case GOTO_SUBROUTINE_EXPR:
9577 rtx subr = (rtx) TREE_OPERAND (exp, 0);
9578 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
9579 rtx return_address = gen_label_rtx ();
9580 emit_move_insn (return_link,
9581 gen_rtx_LABEL_REF (Pmode, return_address));
9583 emit_label (return_address);
9588 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
9591 return get_exception_pointer (cfun);
9594 /* Function descriptors are not valid except for as
9595 initialization constants, and should not be expanded. */
9599 return (*lang_hooks.expand_expr) (exp, original_target, tmode, modifier);
9602 /* Here to do an ordinary binary operator, generating an instruction
9603 from the optab already placed in `this_optab'. */
9605 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
9607 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
9608 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9610 if (modifier == EXPAND_STACK_PARM)
9612 temp = expand_binop (mode, this_optab, op0, op1, target,
9613 unsignedp, OPTAB_LIB_WIDEN);
9619 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9620 when applied to the address of EXP produces an address known to be
9621 aligned more than BIGGEST_ALIGNMENT. */
9624 is_aligning_offset (offset, exp)
9628 /* Strip off any conversions and WITH_RECORD_EXPR nodes. */
9629 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9630 || TREE_CODE (offset) == NOP_EXPR
9631 || TREE_CODE (offset) == CONVERT_EXPR
9632 || TREE_CODE (offset) == WITH_RECORD_EXPR)
9633 offset = TREE_OPERAND (offset, 0);
9635 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9636 power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9637 if (TREE_CODE (offset) != BIT_AND_EXPR
9638 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9639 || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT) <= 0
9640 || !exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9643 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9644 It must be NEGATE_EXPR. Then strip any more conversions. */
9645 offset = TREE_OPERAND (offset, 0);
9646 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9647 || TREE_CODE (offset) == NOP_EXPR
9648 || TREE_CODE (offset) == CONVERT_EXPR)
9649 offset = TREE_OPERAND (offset, 0);
9651 if (TREE_CODE (offset) != NEGATE_EXPR)
9654 offset = TREE_OPERAND (offset, 0);
9655 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9656 || TREE_CODE (offset) == NOP_EXPR
9657 || TREE_CODE (offset) == CONVERT_EXPR)
9658 offset = TREE_OPERAND (offset, 0);
9660 /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
9661 whose type is the same as EXP. */
9662 return (TREE_CODE (offset) == ADDR_EXPR
9663 && (TREE_OPERAND (offset, 0) == exp
9664 || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
9665 && (TREE_TYPE (TREE_OPERAND (offset, 0))
9666 == TREE_TYPE (exp)))));
9669 /* Return the tree node if an ARG corresponds to a string constant or zero
9670 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9671 in bytes within the string that ARG is accessing. The type of the
9672 offset will be `sizetype'. */
9675 string_constant (arg, ptr_offset)
9681 if (TREE_CODE (arg) == ADDR_EXPR
9682 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9684 *ptr_offset = size_zero_node;
9685 return TREE_OPERAND (arg, 0);
9687 else if (TREE_CODE (arg) == PLUS_EXPR)
9689 tree arg0 = TREE_OPERAND (arg, 0);
9690 tree arg1 = TREE_OPERAND (arg, 1);
9695 if (TREE_CODE (arg0) == ADDR_EXPR
9696 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9698 *ptr_offset = convert (sizetype, arg1);
9699 return TREE_OPERAND (arg0, 0);
9701 else if (TREE_CODE (arg1) == ADDR_EXPR
9702 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9704 *ptr_offset = convert (sizetype, arg0);
9705 return TREE_OPERAND (arg1, 0);
9712 /* Expand code for a post- or pre- increment or decrement
9713 and return the RTX for the result.
9714 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9717 expand_increment (exp, post, ignore)
9723 tree incremented = TREE_OPERAND (exp, 0);
9724 optab this_optab = add_optab;
9726 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9727 int op0_is_copy = 0;
9728 int single_insn = 0;
9729 /* 1 means we can't store into OP0 directly,
9730 because it is a subreg narrower than a word,
9731 and we don't dare clobber the rest of the word. */
9734 /* Stabilize any component ref that might need to be
9735 evaluated more than once below. */
9737 || TREE_CODE (incremented) == BIT_FIELD_REF
9738 || (TREE_CODE (incremented) == COMPONENT_REF
9739 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9740 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9741 incremented = stabilize_reference (incremented);
9742 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9743 ones into save exprs so that they don't accidentally get evaluated
9744 more than once by the code below. */
9745 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9746 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9747 incremented = save_expr (incremented);
9749 /* Compute the operands as RTX.
9750 Note whether OP0 is the actual lvalue or a copy of it:
9751 I believe it is a copy iff it is a register or subreg
9752 and insns were generated in computing it. */
9754 temp = get_last_insn ();
9755 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9757 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9758 in place but instead must do sign- or zero-extension during assignment,
9759 so we copy it into a new register and let the code below use it as
9762 Note that we can safely modify this SUBREG since it is know not to be
9763 shared (it was made by the expand_expr call above). */
9765 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9768 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9772 else if (GET_CODE (op0) == SUBREG
9773 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9775 /* We cannot increment this SUBREG in place. If we are
9776 post-incrementing, get a copy of the old value. Otherwise,
9777 just mark that we cannot increment in place. */
9779 op0 = copy_to_reg (op0);
9784 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9785 && temp != get_last_insn ());
9786 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9788 /* Decide whether incrementing or decrementing. */
9789 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9790 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9791 this_optab = sub_optab;
9793 /* Convert decrement by a constant into a negative increment. */
9794 if (this_optab == sub_optab
9795 && GET_CODE (op1) == CONST_INT)
9797 op1 = GEN_INT (-INTVAL (op1));
9798 this_optab = add_optab;
9801 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9802 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9804 /* For a preincrement, see if we can do this with a single instruction. */
9807 icode = (int) this_optab->handlers[(int) mode].insn_code;
9808 if (icode != (int) CODE_FOR_nothing
9809 /* Make sure that OP0 is valid for operands 0 and 1
9810 of the insn we want to queue. */
9811 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9812 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9813 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9817 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9818 then we cannot just increment OP0. We must therefore contrive to
9819 increment the original value. Then, for postincrement, we can return
9820 OP0 since it is a copy of the old value. For preincrement, expand here
9821 unless we can do it with a single insn.
9823 Likewise if storing directly into OP0 would clobber high bits
9824 we need to preserve (bad_subreg). */
9825 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9827 /* This is the easiest way to increment the value wherever it is.
9828 Problems with multiple evaluation of INCREMENTED are prevented
9829 because either (1) it is a component_ref or preincrement,
9830 in which case it was stabilized above, or (2) it is an array_ref
9831 with constant index in an array in a register, which is
9832 safe to reevaluate. */
9833 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9834 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9835 ? MINUS_EXPR : PLUS_EXPR),
9838 TREE_OPERAND (exp, 1));
9840 while (TREE_CODE (incremented) == NOP_EXPR
9841 || TREE_CODE (incremented) == CONVERT_EXPR)
9843 newexp = convert (TREE_TYPE (incremented), newexp);
9844 incremented = TREE_OPERAND (incremented, 0);
9847 temp = expand_assignment (incremented, newexp, ! post && ! ignore , 0);
9848 return post ? op0 : temp;
9853 /* We have a true reference to the value in OP0.
9854 If there is an insn to add or subtract in this mode, queue it.
9855 Queueing the increment insn avoids the register shuffling
9856 that often results if we must increment now and first save
9857 the old value for subsequent use. */
9859 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9860 op0 = stabilize (op0);
9863 icode = (int) this_optab->handlers[(int) mode].insn_code;
9864 if (icode != (int) CODE_FOR_nothing
9865 /* Make sure that OP0 is valid for operands 0 and 1
9866 of the insn we want to queue. */
9867 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9868 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9870 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9871 op1 = force_reg (mode, op1);
9873 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9875 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9877 rtx addr = (general_operand (XEXP (op0, 0), mode)
9878 ? force_reg (Pmode, XEXP (op0, 0))
9879 : copy_to_reg (XEXP (op0, 0)));
9882 op0 = replace_equiv_address (op0, addr);
9883 temp = force_reg (GET_MODE (op0), op0);
9884 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9885 op1 = force_reg (mode, op1);
9887 /* The increment queue is LIFO, thus we have to `queue'
9888 the instructions in reverse order. */
9889 enqueue_insn (op0, gen_move_insn (op0, temp));
9890 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9895 /* Preincrement, or we can't increment with one simple insn. */
9897 /* Save a copy of the value before inc or dec, to return it later. */
9898 temp = value = copy_to_reg (op0);
9900 /* Arrange to return the incremented value. */
9901 /* Copy the rtx because expand_binop will protect from the queue,
9902 and the results of that would be invalid for us to return
9903 if our caller does emit_queue before using our result. */
9904 temp = copy_rtx (value = op0);
9906 /* Increment however we can. */
9907 op1 = expand_binop (mode, this_optab, value, op1, op0,
9908 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9910 /* Make sure the value is stored into OP0. */
9912 emit_move_insn (op0, op1);
9917 /* Generate code to calculate EXP using a store-flag instruction
9918 and return an rtx for the result. EXP is either a comparison
9919 or a TRUTH_NOT_EXPR whose operand is a comparison.
9921 If TARGET is nonzero, store the result there if convenient.
9923 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
9926 Return zero if there is no suitable set-flag instruction
9927 available on this machine.
9929 Once expand_expr has been called on the arguments of the comparison,
9930 we are committed to doing the store flag, since it is not safe to
9931 re-evaluate the expression. We emit the store-flag insn by calling
9932 emit_store_flag, but only expand the arguments if we have a reason
9933 to believe that emit_store_flag will be successful. If we think that
9934 it will, but it isn't, we have to simulate the store-flag with a
9935 set/jump/set sequence. */
9938 do_store_flag (exp, target, mode, only_cheap)
9941 enum machine_mode mode;
9945 tree arg0, arg1, type;
9947 enum machine_mode operand_mode;
9951 enum insn_code icode;
9952 rtx subtarget = target;
9955 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9956 result at the end. We can't simply invert the test since it would
9957 have already been inverted if it were valid. This case occurs for
9958 some floating-point comparisons. */
9960 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9961 invert = 1, exp = TREE_OPERAND (exp, 0);
9963 arg0 = TREE_OPERAND (exp, 0);
9964 arg1 = TREE_OPERAND (exp, 1);
9966 /* Don't crash if the comparison was erroneous. */
9967 if (arg0 == error_mark_node || arg1 == error_mark_node)
9970 type = TREE_TYPE (arg0);
9971 operand_mode = TYPE_MODE (type);
9972 unsignedp = TREE_UNSIGNED (type);
9974 /* We won't bother with BLKmode store-flag operations because it would mean
9975 passing a lot of information to emit_store_flag. */
9976 if (operand_mode == BLKmode)
9979 /* We won't bother with store-flag operations involving function pointers
9980 when function pointers must be canonicalized before comparisons. */
9981 #ifdef HAVE_canonicalize_funcptr_for_compare
9982 if (HAVE_canonicalize_funcptr_for_compare
9983 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9984 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9986 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9987 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9988 == FUNCTION_TYPE))))
9995 /* Get the rtx comparison code to use. We know that EXP is a comparison
9996 operation of some type. Some comparisons against 1 and -1 can be
9997 converted to comparisons with zero. Do so here so that the tests
9998 below will be aware that we have a comparison with zero. These
9999 tests will not catch constants in the first operand, but constants
10000 are rarely passed as the first operand. */
10002 switch (TREE_CODE (exp))
10011 if (integer_onep (arg1))
10012 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10014 code = unsignedp ? LTU : LT;
10017 if (! unsignedp && integer_all_onesp (arg1))
10018 arg1 = integer_zero_node, code = LT;
10020 code = unsignedp ? LEU : LE;
10023 if (! unsignedp && integer_all_onesp (arg1))
10024 arg1 = integer_zero_node, code = GE;
10026 code = unsignedp ? GTU : GT;
10029 if (integer_onep (arg1))
10030 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10032 code = unsignedp ? GEU : GE;
10035 case UNORDERED_EXPR:
10061 /* Put a constant second. */
10062 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10064 tem = arg0; arg0 = arg1; arg1 = tem;
10065 code = swap_condition (code);
10068 /* If this is an equality or inequality test of a single bit, we can
10069 do this by shifting the bit being tested to the low-order bit and
10070 masking the result with the constant 1. If the condition was EQ,
10071 we xor it with 1. This does not require an scc insn and is faster
10072 than an scc insn even if we have it. */
10074 if ((code == NE || code == EQ)
10075 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10076 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10078 tree inner = TREE_OPERAND (arg0, 0);
10079 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10082 /* If INNER is a right shift of a constant and it plus BITNUM does
10083 not overflow, adjust BITNUM and INNER. */
10085 if (TREE_CODE (inner) == RSHIFT_EXPR
10086 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10087 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10088 && bitnum < TYPE_PRECISION (type)
10089 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10090 bitnum - TYPE_PRECISION (type)))
10092 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10093 inner = TREE_OPERAND (inner, 0);
10096 /* If we are going to be able to omit the AND below, we must do our
10097 operations as unsigned. If we must use the AND, we have a choice.
10098 Normally unsigned is faster, but for some machines signed is. */
10099 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10100 #ifdef LOAD_EXTEND_OP
10101 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10107 if (! get_subtarget (subtarget)
10108 || GET_MODE (subtarget) != operand_mode
10109 || ! safe_from_p (subtarget, inner, 1))
10112 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10115 op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
10116 size_int (bitnum), subtarget, ops_unsignedp);
10118 if (GET_MODE (op0) != mode)
10119 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10121 if ((code == EQ && ! invert) || (code == NE && invert))
10122 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10123 ops_unsignedp, OPTAB_LIB_WIDEN);
10125 /* Put the AND last so it can combine with more things. */
10126 if (bitnum != TYPE_PRECISION (type) - 1)
10127 op0 = expand_and (mode, op0, const1_rtx, subtarget);
10132 /* Now see if we are likely to be able to do this. Return if not. */
10133 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10136 icode = setcc_gen_code[(int) code];
10137 if (icode == CODE_FOR_nothing
10138 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10140 /* We can only do this if it is one of the special cases that
10141 can be handled without an scc insn. */
10142 if ((code == LT && integer_zerop (arg1))
10143 || (! only_cheap && code == GE && integer_zerop (arg1)))
10145 else if (BRANCH_COST >= 0
10146 && ! only_cheap && (code == NE || code == EQ)
10147 && TREE_CODE (type) != REAL_TYPE
10148 && ((abs_optab->handlers[(int) operand_mode].insn_code
10149 != CODE_FOR_nothing)
10150 || (ffs_optab->handlers[(int) operand_mode].insn_code
10151 != CODE_FOR_nothing)))
10157 if (! get_subtarget (target)
10158 || GET_MODE (subtarget) != operand_mode
10159 || ! safe_from_p (subtarget, arg1, 1))
10162 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10163 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10166 target = gen_reg_rtx (mode);
10168 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10169 because, if the emit_store_flag does anything it will succeed and
10170 OP0 and OP1 will not be used subsequently. */
10172 result = emit_store_flag (target, code,
10173 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10174 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10175 operand_mode, unsignedp, 1);
10180 result = expand_binop (mode, xor_optab, result, const1_rtx,
10181 result, 0, OPTAB_LIB_WIDEN);
  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
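  /* Together with the move just emitted, the code below forms the
     sequence

	target = 1;  if (cond) goto label;  target = 0;  label:;

     (with 0 and 1 interchanged when INVERT is set), so the branch
     preserves the "true" value stored above.  */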
  result = compare_from_rtx (op0, op1, code, unsignedp,
			     operand_mode, NULL_RTX);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
	     || (result != const0_rtx && invert))
	    ? const0_rtx : const1_rtx);

  /* The code of RESULT may not match CODE if compare_from_rtx
     decided to swap its operands and reverse the original code.

     We know that compare_from_rtx returns either a CONST_INT or
     a new comparison code, so it is safe to just extract the
     code from RESULT.  */
  code = GET_CODE (result);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif
/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */
#ifndef CASE_VALUES_THRESHOLD
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#endif /* CASE_VALUES_THRESHOLD */
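/* With a casesi pattern the bounds check is part of the dispatch
   itself, so a table pays off at four case labels; without one, the
   explicit comparison raises the break-even point to five.  */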
unsigned int
case_values_threshold ()
{
  return CASE_VALUES_THRESHOLD;
}
/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */

int
try_casesi (index_type, index_expr, minval, range,
	    table_label, default_label)
     tree index_type, index_expr, minval, range;
     rtx table_label ATTRIBUTE_UNUSED;
     rtx default_label;
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;
  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build (MINUS_EXPR, index_type,
			  index_expr, minval);
      minval = integer_zero_node;
      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
			       omode, 1, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
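  /* Any value that survives the bound check above is at most RANGE,
     which a realistic dispatch table keeps well within SImode, so
     narrowing the index cannot change its value.  */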
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
	{
	  index_expr = convert ((*lang_hooks.types.type_for_size)
				(index_bits, 0), index_expr);
	  index_type = TREE_TYPE (index_expr);
	}

      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
    }
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);
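  /* Each operand of the casesi pattern is validated against the
     predicate recorded in insn_data; anything the predicate rejects
     is forced into a register of the mode the pattern expects.  The
     same idiom repeats for operands 1 and 2 below.  */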
  op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
		       op1, TREE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
		       op2, TREE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
			      table_label, default_label));
  return 1;
}
/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif
/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

static void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  rtx temp, vector;
  if (INTVAL (range) > cfun->max_jumptable_ents)
    cfun->max_jumptable_ents = INTVAL (range);

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
			   default_label);
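  /* For example, with case values 3..7 the caller passes INDEX - 3
     and a RANGE of 4.  An original index of 2 becomes (unsigned) -1,
     which compares greater than 4 just as an index of 8 (now 5)
     does, so this single GTU branch rejects both ends.  */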
  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);
  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif
  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
			gen_rtx_MULT (Pmode, index,
				      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
			gen_rtx_LABEL_REF (Pmode, table_label));
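  /* The address built above is TABLE_LABEL + INDEX * entry_size,
     i.e. the address of the INDEXth entry of the dispatch table.  */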
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  MEM_NOTRAP_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
int
try_tablejump (index_type, index_expr, minval, range,
	       table_label, default_label)
     tree index_type, index_expr, minval, range;
     rtx table_label, default_label;
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold (build (MINUS_EXPR, index_type,
			    convert (index_type, index_expr),
			    convert (index_type, minval)));
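  /* The subtraction above rebases the index at zero, which is the
     form do_tablejump expects (see its comment about the lowest
     value already being subtracted).  */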
  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
		convert_modes (TYPE_MODE (index_type),
			       TYPE_MODE (TREE_TYPE (range)),
			       expand_expr (range, NULL_RTX,
					    VOIDmode, 0),
			       TREE_UNSIGNED (TREE_TYPE (range))),
		table_label, default_label);
  return 1;
}
/* Nonzero if the mode is a valid vector mode for this architecture.
   This returns nonzero even if there is no hardware support for the
   vector mode, but we can emulate with narrower modes.  */

int
vector_mode_valid_p (mode)
     enum machine_mode mode;
{
  enum mode_class class = GET_MODE_CLASS (mode);
  enum machine_mode innermode;

  /* Doh!  What's going on?  */
  if (class != MODE_VECTOR_INT
      && class != MODE_VECTOR_FLOAT)
    return 0;

  /* Hardware support.  Woo hoo!  */
  if (VECTOR_MODE_SUPPORTED_P (mode))
    return 1;

  innermode = GET_MODE_INNER (mode);

  /* We should probably return 1 if requesting V4DI and we have no DI,
     but we have V2DI, but this is probably very unlikely.  */

  /* If we have support for the inner mode, we can safely emulate it.
     We may not have V2DI, but we can emulate with a pair of DIs.  */
  return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
}
/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */

static rtx
const_vector_from_tree (exp)
     tree exp;
{
  rtvec v;
  int units, i;
  tree link, elt;
  enum machine_mode inner, mode;

  mode = TYPE_MODE (TREE_TYPE (exp));

  if (is_zeros_p (exp))
    return CONST0_RTX (mode);

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);
  v = rtvec_alloc (units);

  link = TREE_VECTOR_CST_ELTS (exp);
  for (i = 0; link; link = TREE_CHAIN (link), ++i)
    {
      elt = TREE_VALUE (link);

      if (TREE_CODE (elt) == REAL_CST)
	RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
							 inner);
      else
	RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
					       TREE_INT_CST_HIGH (elt),
					       inner);
    }

  return gen_rtx_raw_CONST_VECTOR (mode, v);
}

#include "gt-expr.h"