/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "typeclass.h"
#include "langhooks.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif
/* Convert defined/undefined to boolean.  */
#ifdef TARGET_MEM_FUNCTIONS
#undef TARGET_MEM_FUNCTIONS
#define TARGET_MEM_FUNCTIONS 1
#else
#define TARGET_MEM_FUNCTIONS 0
#endif
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Chain of pending expressions for PLACEHOLDER_EXPR to replace.  */
tree placeholder_list = 0;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};
/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */
struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
  void *constfundata;
  int reverse;
};
static rtx enqueue_insn (rtx, rtx);
static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
						     unsigned int);
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
			      struct move_by_pieces *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movstr (rtx, rtx, rtx, unsigned);
static rtx emit_block_move_via_libcall (rtx, rtx, rtx);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
			       struct store_by_pieces *);
static bool clear_storage_via_clrstr (rtx, rtx, unsigned);
static rtx clear_storage_via_libcall (rtx, rtx);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static int is_zeros_p (tree);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
				     HOST_WIDE_INT, enum machine_mode,
				     tree, tree, int, int);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
			tree, enum machine_mode, int, tree, int);
static rtx var_rtx (tree);
static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
static unsigned HOST_WIDE_INT highest_pow2_factor_for_type (tree, tree);
static int is_aligning_offset (tree, tree);
static rtx expand_increment (tree, int, int);
static void expand_operands (tree, tree, rtx, rtx*, rtx*,
			     enum expand_modifier);
static rtx do_store_flag (tree, rtx, enum machine_mode, int);
static void emit_single_push_insn (enum machine_mode, rtx, tree);
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
static rtx const_vector_from_tree (tree);
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
/* If a memory-to-memory move would take MOVE_RATIO or more simple
   move-instruction sequences, we will do a movstr or libcall instead.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)
#endif
#endif
/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
#endif
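/* For example (assuming a 32-bit target with MOVE_MAX == 4 and the
   default MOVE_RATIO of 15): an aligned 12-byte copy costs three
   SImode moves, so MOVE_BY_PIECES_P (12, 32) is nonzero and the copy
   is expanded as inline moves rather than as a movstr or libcall.  */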
/* If a clear memory operation would take CLEAR_RATIO or more simple
   move-instruction sequences, we will do a clrstr or libcall instead.  */

#ifndef CLEAR_RATIO
#if defined (HAVE_clrstrqi) || defined (HAVE_clrstrhi) || defined (HAVE_clrstrsi) || defined (HAVE_clrstrdi) || defined (HAVE_clrstrti)
#define CLEAR_RATIO 2
#else
/* If we are optimizing for space, cut down the default clear ratio.  */
#define CLEAR_RATIO (optimize_size ? 3 : 15)
#endif
#endif
/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero, or
   to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
#endif
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* These arrays record the insn_code of two different kinds of insns
   to perform block compares.  */
enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
enum insn_code cmpmem_optab[NUM_MACHINE_MODES];

/* Stack of EXPR_WITH_FILE_LOCATION nested expressions.  */
struct file_stack *expr_wfl_stack;
/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once (void)
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    REGNO (reg) = regno;

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
	   srcmode = GET_MODE_WIDER_MODE (srcmode))
	{
	  enum insn_code ic;

	  ic = can_extend_p (mode, srcmode, 0);
	  if (ic == CODE_FOR_nothing)
	    continue;

	  PUT_MODE (mem, srcmode);

	  if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
	    float_extend_from_mem[mode][srcmode] = true;
	}
    }
}
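/* As an illustration (target-dependent, so only an assumption): on a
   machine providing an extendsfdf2 pattern whose source predicate
   accepts a MEM, the loop above sets
   float_extend_from_mem[DFmode][SFmode], allowing a DFmode value to be
   extended directly from an SFmode memory reference.  */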
/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
}
/* Small sanity check that the queue is empty at the end of a function.  */

void
finish_expr_for_function (void)
{
  if (pending_chain)
    abort ();
}
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (rtx var, rtx body)
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
				  body, pending_chain);
  return pending_chain;
}
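/* A sketch of how the queue is used (the scenario is illustrative):
   expanding a post-increment such as V++ calls enqueue_insn with the
   increment insn, and the returned QUEUED rtx stands for V's
   pre-increment value.  Any code that later puts that rtx into an
   insn must first pass it through protect_from_queue, and emit_queue
   eventually emits the queued increment itself.  */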
/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (rtx x, int modify)
{
  RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out. */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  rtx y = XEXP (x, 0);
	  rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));

	  if (QUEUED_INSN (y))
	    {
	      rtx temp = gen_reg_rtx (GET_MODE (x));

	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }

	  /* Copy the address into a pseudo, so that the returned value
	     remains correct across calls to emit_queue.  */
	  return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
	}

      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }
  /* If the increment has not happened, use the variable itself.  Copy it
     into a new pseudo so that the value remains correct across calls to
     emit_queue.  */
  if (QUEUED_INSN (x) == 0)
    return copy_to_reg (QUEUED_VAR (x));
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

int
queued_subexp_p (rtx x)
{
  enum rtx_code code = GET_CODE (x);

  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
	      || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}
/* Perform all the pending incrementations.  */

void
emit_queue (void)
{
  rtx p;
  while ((p = pending_chain))
    {
      rtx body = QUEUED_BODY (p);

      switch (GET_CODE (body))
	{
	case INSN:
	case JUMP_INSN:
	case CALL_INSN:
	case CODE_LABEL:
	case BARRIER:
	case NOTE:
	  QUEUED_INSN (p) = body;
	  emit_insn (body);
	  break;

#ifdef ENABLE_CHECKING
	case SEQUENCE:
	  abort ();
	  break;
#endif

	default:
	  QUEUED_INSN (p) = emit_insn (body);
	  break;
	}

      pending_chain = QUEUED_NEXT (p);
    }
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (rtx to, rtx from, int unsignedp)
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
			      : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
	abort ();

      if (VECTOR_MODE_P (to_mode))
	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }

  if (to_real)
    {
      rtx value, insns;
      convert_optab tab;

      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
	tab = sext_optab;
      else if (GET_MODE_BITSIZE (from_mode) > GET_MODE_BITSIZE (to_mode))
	tab = trunc_optab;
      else
	abort ();

      /* Try converting directly if the insn is supported.  */

      code = tab->handlers[to_mode][from_mode].insn_code;
      if (code != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from,
			  tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
	  return;
	}

      /* Otherwise use a libcall.  */
      libcall = tab->handlers[to_mode][from_mode].libfunc;

      if (!libcall)
	/* This conversion is not implemented yet.  */
	abort ();

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
				       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value,
			  tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
								       from)
			  : gen_rtx_FLOAT_EXTEND (to_mode, from));
      return;
    }

  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);

      if (trunc_optab->handlers[to_mode][full_mode].insn_code
	  == CODE_FOR_nothing)
	abort ();

      if (full_mode != from_mode)
	from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
		      to, from, UNKNOWN);
      return;
    }
  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);

      if (sext_optab->handlers[full_mode][from_mode].insn_code
	  == CODE_FOR_nothing)
	abort ();

      emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
		      to, from, UNKNOWN);
      if (to_mode == full_mode)
	return;

      /* else proceed to integer conversions below.  */
      from_mode = full_mode;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  if (flag_force_mem)
	    from = force_not_mem (from);

	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;
	  rtx tmp;
	  tree shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
				      - GET_MODE_BITSIZE (from_mode), 0);
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }

  /* Support special truncate insns for certain modes.  */
  if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
    {
      emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
		      to, from, UNKNOWN);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
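/* Usage sketch (illustrative values assumed): to widen an unsigned
   QImode pseudo FROM into an SImode pseudo TO, a caller does

     convert_move (to, from, 1);

   which emits a zero-extension (a zero_extendqisi2-style insn when the
   target has one, otherwise one of the fallback paths above).  */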
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && (! HARD_REGISTER_P (x)
			      || HARD_REGNO_MODE_OK (REGNO (x), mode))
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ??? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return gen_int_mode (val, mode);
	}

      return gen_lowpart (mode, x);
    }

  /* Converting from integer constant into mode is always equivalent to a
     subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    {
      if (GET_MODE_BITSIZE (mode) != GET_MODE_BITSIZE (oldmode))
	abort ();
      return simplify_gen_subreg (mode, x, oldmode, 0);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
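/* For instance (an illustrative case): convert_modes (SImode, QImode,
   GEN_INT (-1), 1) takes the CONST_INT path above, zero-extends the
   value from 8 bits, and returns (const_int 255) without emitting any
   insns.  */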
/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

int
can_move_by_pieces (unsigned HOST_WIDE_INT len,
		    unsigned int align ATTRIBUTE_UNUSED)
{
  return MOVE_BY_PIECES_P (len, align);
}
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).  The caller must pass FROM
   and TO through protect_from_queue before calling.

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */

rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
		unsigned int align, int endp)
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();

  if (endp)
    {
      rtx to1;

      if (data.reverse)
	abort ();
      if (data.autinc_to)
	{
	  if (endp == 2)
	    {
	      if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
		emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
	      else
		data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
								-1));
	    }
	  to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
					   data.offset);
	}
      else
	{
	  if (endp == 2)
	    --data.offset;
	  to1 = adjust_address (data.to, QImode, data.offset);
	}
      return to1;
    }
  else
    return data.to;
}
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align)
{
  unsigned HOST_WIDE_INT n_insns = 0;
  unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  if (l)
    abort ();
  return n_insns;
}
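/* A worked example (assuming MOVE_MAX == 4, adequate alignment, and a
   mov pattern for each integer mode): for L == 11 the loop counts two
   SImode moves (8 bytes), then one HImode move (2 bytes), then one
   QImode move (1 byte), and returns 4.  */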
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
		  struct move_by_pieces *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->to)
	{
	  if (data->autinc_to)
	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
					     data->offset);
	  else
	    to1 = adjust_address (data->to, mode, data->offset);
	}

      if (data->autinc_from)
	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
					   data->offset);
      else
	from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
	emit_insn ((*genfun) (to1, from1));
      else
	{
#ifdef PUSH_ROUNDING
	  emit_single_push_insn (mode, from1, NULL);
#else
	  abort ();
#endif
	}

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
	 to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      abort ();
    }

  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  if (GET_MODE (x) != BLKmode)
    abort ();
  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (GET_CODE (size) == CONST_INT)
    {
      if (INTVAL (size) == 0)
	return 0;

      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, size);
      set_mem_size (y, size);
    }

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movstr (x, y, size, align))
    ;
  else if (may_use_call)
    retval = emit_block_move_via_libcall (x, y, size);
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}
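/* Typical use (a hedged sketch; the address pseudos and size are
   made up for illustration):

     rtx dst = gen_rtx_MEM (BLKmode, dst_addr);
     rtx src = gen_rtx_MEM (BLKmode, src_addr);
     emit_block_move (dst, src, GEN_INT (32), BLOCK_OP_NORMAL);

   With a constant size this first consults MOVE_BY_PIECES_P, then
   tries the movstr pattern, and finally falls back to the
   memcpy/bcopy libcall, whose return value (if any) is returned.  */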
/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
  /* If arguments are pushed on the stack, then they're safe.  */
  if (PUSH_ARGS)
    return true;

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
  {
    tree fn = emit_block_move_libcall_fn (false);
    (void) fn;
    if (REG_PARM_STACK_SPACE (fn) != 0)
      return false;
  }
#endif

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
  {
    CUMULATIVE_ARGS args_so_far;
    tree fn, arg;

    fn = emit_block_move_libcall_fn (false);
    INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0);

    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
      {
	enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
	rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
	if (!tmp || !REG_P (tmp))
	  return false;
#ifdef FUNCTION_ARG_PARTIAL_NREGS
	if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
					NULL_TREE, 1))
	  return false;
#endif
	FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
      }
  }
  return true;
}
/* A subroutine of emit_block_move.  Expand a movstr pattern;
   return true if successful.  */

static bool
emit_block_move_via_movstr (rtx x, rtx y, rtx size, unsigned int align)
{
  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  enum machine_mode mode;

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = movstr_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
	     here because if SIZE is less than the mode mask, as it is
	     returned by the macro, it will definitely be less than the
	     actual mode mask.  */
	  && ((GET_CODE (size) == CONST_INT
	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
		   <= (GET_MODE_MASK (mode) >> 1)))
	      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
	      || (*pred) (x, BLKmode))
	  && ((pred = insn_data[(int) code].operand[1].predicate) == 0
	      || (*pred) (y, BLKmode))
	  && ((pred = insn_data[(int) code].operand[3].predicate) == 0
	      || (*pred) (opalign, VOIDmode)))
	{
	  rtx op2;
	  rtx last = get_last_insn ();
	  rtx pat;

	  op2 = convert_to_mode (mode, size, 1);
	  pred = insn_data[(int) code].operand[2].predicate;
	  if (pred != 0 && ! (*pred) (op2, mode))
	    op2 = copy_to_mode_reg (mode, op2);

	  /* ??? When called via emit_block_move_for_call, it'd be
	     nice if there were some way to inform the backend, so
	     that it doesn't fail the expansion because it thinks
	     emitting the libcall would be more efficient.  */

	  pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	  if (pat)
	    {
	      emit_insn (pat);
	      volatile_ok = 0;
	      return true;
	    }
	  else
	    delete_insns_since (last);
	}
    }

  volatile_ok = 0;
  return false;
}
/* A subroutine of emit_block_move.  Expand a call to memcpy or bcopy.
   Return the return value from memcpy, 0 otherwise.  */

static rtx
emit_block_move_via_libcall (rtx dst, rtx src, rtx size)
{
  rtx dst_addr, src_addr;
  tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* DST, SRC, or SIZE may have been passed through protect_from_queue.

     It is unsafe to save the value generated by protect_from_queue and reuse
     it later.  Consider what happens if emit_queue is called before the
     return value from protect_from_queue is used.

     Expansion of the CALL_EXPR below will call emit_queue before we are
     finished emitting RTL for argument setup.  So if we are not careful we
     could get the wrong value for an argument.

     To avoid this problem we go ahead and emit code to copy the addresses of
     DST and SRC and SIZE into new pseudos.  We can then place those new
     pseudos into an RTL_EXPR and use them later, even after a call to
     emit_queue.

     Note this is not strictly needed for library calls since they do not call
     emit_queue before loading their arguments.  However, we may need to have
     library calls call emit_queue in the future since failing to do so could
     cause problems for targets which define SMALL_REGISTER_CLASSES and pass
     arguments in registers.  */

  dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));

  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  if (TARGET_MEM_FUNCTIONS)
    size_mode = TYPE_MODE (sizetype);
  else
    size_mode = TYPE_MODE (unsigned_type_node);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.

     For convenience, we generate the call to bcopy this way as well.  */

  if (TARGET_MEM_FUNCTIONS)
    size_tree = make_tree (sizetype, size);
  else
    size_tree = make_tree (unsigned_type_node, size);

  fn = emit_block_move_libcall_fn (true);
  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
  if (TARGET_MEM_FUNCTIONS)
    {
      arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
      arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
    }
  else
    {
      arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
      arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
    }

  /* Now we have to build up the CALL_EXPR itself.  */
  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
  call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
		     call_expr, arg_list, NULL_TREE);

  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);

  /* If we are initializing a readonly value, show the above call clobbered
     it.  Otherwise, a load from it may erroneously be hoisted from a loop, or
     the delay slot scheduler might overlook conflicts and take nasty
     decisions.  */
  if (RTX_UNCHANGING_P (dst))
    add_function_usage_to
      (last_call_insn (), gen_rtx_EXPR_LIST (VOIDmode,
					     gen_rtx_CLOBBER (VOIDmode, dst),
					     NULL_RTX));

  return TARGET_MEM_FUNCTIONS ? retval : NULL_RTX;
}
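/* Note the argument-order difference encoded above: memcpy is called
   as memcpy (dst, src, size) while bcopy takes (src, dst, size), which
   is why the two tree_cons chains push the address trees in opposite
   orders (the argument list is built last-argument-first).  */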
/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_move_fn;

void
init_block_move_fn (const char *asmspec)
{
  if (!block_move_fn)
    {
      tree args, fn;

      if (TARGET_MEM_FUNCTIONS)
	{
	  fn = get_identifier ("memcpy");
	  args = build_function_type_list (ptr_type_node, ptr_type_node,
					   const_ptr_type_node, sizetype,
					   NULL_TREE);
	}
      else
	{
	  fn = get_identifier ("bcopy");
	  args = build_function_type_list (void_type_node, const_ptr_type_node,
					   ptr_type_node, unsigned_type_node,
					   NULL_TREE);
	}

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;

      block_move_fn = fn;
    }

  if (asmspec)
    {
      SET_DECL_RTL (block_move_fn, NULL_RTX);
      SET_DECL_ASSEMBLER_NAME (block_move_fn, get_identifier (asmspec));
    }
}
static tree
emit_block_move_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_move_fn)
    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_move_fn, NULL);
      assemble_external (block_move_fn);
    }

  return block_move_fn;
}
/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

static void
emit_block_move_via_loop (rtx x, rtx y, rtx size,
			  unsigned int align ATTRIBUTE_UNUSED)
{
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_note (NOTE_INSN_LOOP_BEG);

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (Pmode, iter_mode, iter, true);
  x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
  y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
			     true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_note (NOTE_INSN_LOOP_CONT);
  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
			   true, top_label);

  emit_note (NOTE_INSN_LOOP_END);
}
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (int regno, rtx x, int nregs)
{
  int i;

  if (nregs == 0)
    return;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      rtx last = get_last_insn ();
      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
				    GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
	abort ();

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

rtx
gen_group_rtx (rtx orig)
{
  int i, length;
  rtx *tmps;

  if (GET_CODE (orig) != PARALLEL)
    abort ();

  length = XVECLEN (orig, 0);
  tmps = alloca (sizeof (rtx) * length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  if (i)
    tmps[0] = 0;

  for (; i < length; i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
    }

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}
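/* For illustration (registers and offsets assumed): a group such as

     (parallel [(expr_list (reg:DI 3) (const_int 0))
		(expr_list (reg:DI 4) (const_int 8))])

   would be cloned as the same PARALLEL with (reg:DI 3) and (reg:DI 4)
   replaced by fresh DImode pseudos, the byte offsets kept unchanged.  */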
/* Emit code to move a block ORIG_SRC of type TYPE to a block DST,
   where DST is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_SRC in bytes, or -1
   if not known.  */

void
emit_group_load (rtx dst, rtx orig_src, tree type ATTRIBUTE_UNUSED, int ssize)
{
  rtx *tmps, src;
  int start, i;

  if (GET_CODE (dst) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  /* Arrange to shift the fragment to where it belongs.
	     extract_bit_field loads to the lsb of the reg.  */
	  if (
#ifdef BLOCK_REG_PADDING
	      BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
	      == (BYTES_BIG_ENDIAN ? upward : downward)
#else
	      BYTES_BIG_ENDIAN
#endif
	      )
	    shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	  bytelen = ssize - bytepos;
	  if (bytelen <= 0)
	    abort ();
	}

      /* If we won't be loading directly from memory, protect the real source
	 from strange tricks we might play; but make sure that the source can
	 be loaded directly into the destination.  */
      src = orig_src;
      if (GET_CODE (orig_src) != MEM
	  && (!CONSTANT_P (orig_src)
	      || (GET_MODE (orig_src) != mode
		  && GET_MODE (orig_src) != VOIDmode)))
	{
	  if (GET_MODE (orig_src) == VOIDmode)
	    src = gen_reg_rtx (mode);
	  else
	    src = gen_reg_rtx (GET_MODE (orig_src));

	  emit_move_insn (src, orig_src);
	}

      /* Optimize the access just a bit.  */
      if (GET_CODE (src) == MEM
	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
	      || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	{
	  tmps[i] = gen_reg_rtx (mode);
	  emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
	}
      else if (GET_CODE (src) == CONCAT)
	{
	  unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
	  unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));

	  if ((bytepos == 0 && bytelen == slen0)
	      || (bytepos != 0 && bytepos + bytelen <= slen))
	    {
	      /* The following assumes that the concatenated objects all
		 have the same size.  In this case, a simple calculation
		 can be used to determine the object and the bit field
		 to be extracted.  */
	      tmps[i] = XEXP (src, bytepos / slen0);
	      if (! CONSTANT_P (tmps[i])
		  && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
		tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
					     (bytepos % slen0) * BITS_PER_UNIT,
					     1, NULL_RTX, mode, mode, ssize);
	    }
	  else if (bytepos == 0)
	    {
	      rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
	      emit_move_insn (mem, src);
	      tmps[i] = adjust_address (mem, mode, 0);
	    }
	  else
	    abort ();
	}
      /* FIXME: A SIMD parallel will eventually lead to a subreg of a
	 SIMD register, which is currently broken.  While we get GCC
	 to emit proper RTL for these cases, let's dump to memory.  */
      else if (VECTOR_MODE_P (GET_MODE (dst))
	       && GET_CODE (src) == REG)
	{
	  int slen = GET_MODE_SIZE (GET_MODE (src));
	  rtx mem;

	  mem = assign_stack_temp (GET_MODE (src), slen, 0);
	  emit_move_insn (mem, src);
	  tmps[i] = adjust_address (mem, mode, (int) bytepos);
	}
      else if (CONSTANT_P (src)
	       || (GET_CODE (src) == REG && GET_MODE (src) == mode))
	tmps[i] = src;
      else
	tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
				     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
				     mode, mode, ssize);

      if (shift)
	expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
		      tmps[i], 0, OPTAB_WIDEN);
    }

  emit_queue ();

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
}
/* Emit code to move a block SRC to block DST, where SRC and DST are
   non-consecutive groups of registers, each represented by a PARALLEL.  */

void
emit_group_move (rtx dst, rtx src)
{
  int i;

  if (GET_CODE (src) != PARALLEL
      || GET_CODE (dst) != PARALLEL
      || XVECLEN (src, 0) != XVECLEN (dst, 0))
    abort ();

  /* Skip first entry if NULL.  */
  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
		    XEXP (XVECEXP (src, 0, i), 0));
}
1989 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1990 where SRC is non-consecutive registers represented by a PARALLEL.
1991 SSIZE represents the total size of block ORIG_DST, or -1 if not
1995 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
2000 if (GET_CODE (src) != PARALLEL)
2003 /* Check for a NULL entry, used to indicate that the parameter goes
2004 both on the stack and in registers. */
2005 if (XEXP (XVECEXP (src, 0, 0), 0))
2010 tmps = alloca (sizeof (rtx) * XVECLEN (src, 0));
2012 /* Copy the (probable) hard regs into pseudos. */
2013 for (i = start; i < XVECLEN (src, 0); i++)
2015 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2016 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2017 emit_move_insn (tmps[i], reg);
2021 /* If we won't be storing directly into memory, protect the real destination
2022 from strange tricks we might play. */
2024 if (GET_CODE (dst) == PARALLEL)
2028 /* We can get a PARALLEL dst if there is a conditional expression in
2029 a return statement. In that case, the dst and src are the same,
2030 so no action is necessary. */
2031 if (rtx_equal_p (dst, src))
2034 /* It is unclear if we can ever reach here, but we may as well handle
2035 it. Allocate a temporary, and split this into a store/load to/from
2038 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2039 emit_group_store (temp, src, type, ssize);
2040 emit_group_load (dst, temp, type, ssize);
2043 else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
2045 dst = gen_reg_rtx (GET_MODE (orig_dst));
2046 /* Make life a bit easier for combine. */
2047 emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
2050 /* Process the pieces. */
2051 for (i = start; i < XVECLEN (src, 0); i++)
2053 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2054 enum machine_mode mode = GET_MODE (tmps[i]);
2055 unsigned int bytelen = GET_MODE_SIZE (mode);
2058 /* Handle trailing fragments that run over the size of the struct. */
2059 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2061 /* store_bit_field always takes its value from the lsb.
2062 Move the fragment to the lsb if it's not already there. */
2064 #ifdef BLOCK_REG_PADDING
2065 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2066 == (BYTES_BIG_ENDIAN ? upward : downward)
2072 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2073 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2074 tmps[i], 0, OPTAB_WIDEN);
2076 bytelen = ssize - bytepos;
2079 if (GET_CODE (dst) == CONCAT)
2081 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2082 dest = XEXP (dst, 0);
2083 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2085 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2086 dest = XEXP (dst, 1);
2088 else if (bytepos == 0 && XVECLEN (src, 0))
2090 dest = assign_stack_temp (GET_MODE (dest),
2091 GET_MODE_SIZE (GET_MODE (dest)), 0);
2092 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2101 /* Optimize the access just a bit. */
2102 if (GET_CODE (dest) == MEM
2103 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2104 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2105 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2106 && bytelen == GET_MODE_SIZE (mode))
2107 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2109 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2110 mode, tmps[i], ssize);
2115 /* Copy from the pseudo into the (probable) hard reg. */
2116 if (orig_dst != dst)
2117 emit_move_insn (orig_dst, dst);
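/* Editorial sketch (not part of the original source): how a caller might
   use emit_group_store to scatter a 16-byte value held in two DImode
   pseudos into memory.  The helper name and layout are hypothetical; the
   PARALLEL shape is the one the function expects.  */
#if 0
static void
example_group_store (rtx mem_dst, tree struct_type)
{
  rtx e0, e1, par;

  /* (parallel [(expr_list (reg:DI) (const_int 0))
                (expr_list (reg:DI) (const_int 8))]):
     the first register holds bytes 0-7, the second bytes 8-15.  */
  e0 = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (DImode), GEN_INT (0));
  e1 = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (DImode), GEN_INT (8));
  par = gen_rtx_PARALLEL (BLKmode, gen_rtvec (2, e0, e1));

  /* SSIZE is the total size of the destination block in bytes.  */
  emit_group_store (mem_dst, par, struct_type, 16);
}
#endif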
2120 /* Generate code to copy a BLKmode object of TYPE out of a
2121 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2122 is null, a stack temporary is created. TGTBLK is returned.
2124 The primary purpose of this routine is to handle functions
2125 that return BLKmode structures in registers. Some machines
2126 (the PA for example) want to return all small structures
2127 in registers regardless of the structure's alignment. */
2130 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2132 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2133 rtx src = NULL, dst = NULL;
2134 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2135 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2139 tgtblk = assign_temp (build_qualified_type (type,
2141 | TYPE_QUAL_CONST)),
2143 preserve_temp_slots (tgtblk);
2146 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2147 into a new pseudo which is a full word. */
2149 if (GET_MODE (srcreg) != BLKmode
2150 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2151 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2153 /* Structures whose size is not a multiple of a word are aligned
2154 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2155 machine, this means we must skip the empty high order bytes when
2156 calculating the bit offset. */
2157 if (BYTES_BIG_ENDIAN
2158 && bytes % UNITS_PER_WORD)
2159 big_endian_correction
2160 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2162 /* Copy the structure BITSIZE bits at a time.
2164 We could probably emit more efficient code for machines which do not use
2165 strict alignment, but it doesn't seem worth the effort at the current time. */
2167 for (bitpos = 0, xbitpos = big_endian_correction;
2168 bitpos < bytes * BITS_PER_UNIT;
2169 bitpos += bitsize, xbitpos += bitsize)
2171 /* We need a new source operand each time xbitpos is on a
2172 word boundary and when xbitpos == big_endian_correction
2173 (the first time through). */
2174 if (xbitpos % BITS_PER_WORD == 0
2175 || xbitpos == big_endian_correction)
2176 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2179 /* We need a new destination operand each time bitpos is on a word boundary. */
2181 if (bitpos % BITS_PER_WORD == 0)
2182 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2184 /* Use xbitpos for the source extraction (right justified) and
2185 bitpos for the destination store (left justified). */
2186 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2187 extract_bit_field (src, bitsize,
2188 xbitpos % BITS_PER_WORD, 1,
2189 NULL_RTX, word_mode, word_mode,
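/* Editorial sketch (not part of the original source): the typical use of
   copy_blkmode_from_reg when expanding a call whose BLKmode result comes
   back in registers.  The helper name is hypothetical.  */
#if 0
static rtx
example_copy_struct_return (rtx result_reg, tree struct_type)
{
  /* Passing a null TGTBLK asks copy_blkmode_from_reg to allocate the
     stack temporary itself; the MEM holding the copy is returned.  */
  return copy_blkmode_from_reg (NULL_RTX, result_reg, struct_type);
}
#endif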
2197 /* Add a USE expression for REG to the (possibly empty) list pointed
2198 to by CALL_FUSAGE. REG must denote a hard register. */
2201 use_reg (rtx *call_fusage, rtx reg)
2203 if (GET_CODE (reg) != REG
2204 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2208 = gen_rtx_EXPR_LIST (VOIDmode,
2209 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2212 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2213 starting at REGNO. All of these registers must be hard registers. */
2216 use_regs (rtx *call_fusage, int regno, int nregs)
2220 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2223 for (i = 0; i < nregs; i++)
2224 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2227 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2228 PARALLEL REGS. This is for calls that pass values in multiple
2229 non-contiguous locations. The Irix 6 ABI has examples of this. */
2232 use_group_regs (rtx *call_fusage, rtx regs)
2236 for (i = 0; i < XVECLEN (regs, 0); i++)
2238 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2240 /* A NULL entry means the parameter goes both on the stack and in
2241 registers. This can also be a MEM for targets that pass values
2242 partially on the stack and partially in registers. */
2243 if (reg != 0 && GET_CODE (reg) == REG)
2244 use_reg (call_fusage, reg);
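/* Editorial sketch (not part of the original source): building a
   CALL_FUSAGE chain for a call that reads three consecutive hard
   argument registers.  The starting register number is hypothetical.  */
#if 0
static rtx
example_call_fusage (void)
{
  rtx fusage = NULL_RTX;

  /* Each register becomes one (use (reg ...)) node in an EXPR_LIST
     chain, newest first.  */
  use_regs (&fusage, 3, 3);
  return fusage;
}
#endif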
2249 /* Determine whether the LEN bytes generated by CONSTFUN can be
2250 stored to memory using several move instructions. CONSTFUNDATA is
2251 a pointer which will be passed as argument in every CONSTFUN call.
2252 ALIGN is maximum alignment we can assume. Return nonzero if a
2253 call to store_by_pieces should succeed. */
2256 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2257 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2258 void *constfundata, unsigned int align)
2260 unsigned HOST_WIDE_INT max_size, l;
2261 HOST_WIDE_INT offset = 0;
2262 enum machine_mode mode, tmode;
2263 enum insn_code icode;
2270 if (! STORE_BY_PIECES_P (len, align))
2273 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2274 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2275 align = MOVE_MAX * BITS_PER_UNIT;
2277 /* We would first store what we can in the largest integer mode, then go to
2278 successively smaller modes. */
2281 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2286 max_size = STORE_MAX_PIECES + 1;
2287 while (max_size > 1)
2289 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2290 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2291 if (GET_MODE_SIZE (tmode) < max_size)
2294 if (mode == VOIDmode)
2297 icode = mov_optab->handlers[(int) mode].insn_code;
2298 if (icode != CODE_FOR_nothing
2299 && align >= GET_MODE_ALIGNMENT (mode))
2301 unsigned int size = GET_MODE_SIZE (mode);
2308 cst = (*constfun) (constfundata, offset, mode);
2309 if (!LEGITIMATE_CONSTANT_P (cst))
2319 max_size = GET_MODE_SIZE (mode);
2322 /* The code above should have handled everything. */
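/* Editorial sketch (not part of the original source): a minimal CONSTFUN
   and a query against it.  Both names are hypothetical; the callback
   follows the signature documented above.  */
#if 0
static rtx
example_zero_constfun (void *data ATTRIBUTE_UNUSED,
                       HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                       enum machine_mode mode)
{
  /* Yield a zero of whatever mode the store will use.  */
  return CONST0_RTX (mode);
}

static int
example_can_store_11_bytes (void)
{
  /* Ask whether 11 constant bytes at 32-bit alignment can go out as
     piecewise stores, e.g. SI+SI+HI+QI on a 32-bit target.  */
  return can_store_by_pieces (11, example_zero_constfun, NULL, 32);
}
#endif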
2330 /* Generate several move instructions to store LEN bytes generated by
2331 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2332 pointer which will be passed as argument in every CONSTFUN call.
2333 ALIGN is maximum alignment we can assume.
2334 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
2335 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala stpcpy. */
2339 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2340 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2341 void *constfundata, unsigned int align, int endp)
2343 struct store_by_pieces data;
2352 if (! STORE_BY_PIECES_P (len, align))
2354 to = protect_from_queue (to, 1);
2355 data.constfun = constfun;
2356 data.constfundata = constfundata;
2359 store_by_pieces_1 (&data, align);
2370 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2371 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2373 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2376 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2383 to1 = adjust_address (data.to, QImode, data.offset);
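/* Editorial sketch (not part of the original source): driving the store
   with the hypothetical CONSTFUN sketched above.  With ENDP == 1 the
   returned MEM addresses the byte just past the block, mempcpy style.  */
#if 0
static rtx
example_store_pieces (rtx blk_mem)
{
  return store_by_pieces (blk_mem, 9, example_zero_constfun, NULL,
                          MEM_ALIGN (blk_mem), 1);
}
#endif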
2391 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2392 rtx with BLKmode). The caller must pass TO through protect_from_queue
2393 before calling. ALIGN is maximum alignment we can assume. */
2396 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2398 struct store_by_pieces data;
2403 data.constfun = clear_by_pieces_1;
2404 data.constfundata = NULL;
2407 store_by_pieces_1 (&data, align);
2410 /* Callback routine for clear_by_pieces.
2411 Return const0_rtx unconditionally. */
2414 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2415 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2416 enum machine_mode mode ATTRIBUTE_UNUSED)
2421 /* Subroutine of clear_by_pieces and store_by_pieces.
2422 Generate several move instructions to store LEN bytes of block TO. (A MEM
2423 rtx with BLKmode). The caller must pass TO through protect_from_queue
2424 before calling. ALIGN is maximum alignment we can assume. */
2427 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2428 unsigned int align ATTRIBUTE_UNUSED)
2430 rtx to_addr = XEXP (data->to, 0);
2431 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
2432 enum machine_mode mode = VOIDmode, tmode;
2433 enum insn_code icode;
2436 data->to_addr = to_addr;
2438 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2439 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2441 data->explicit_inc_to = 0;
2443 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2445 data->offset = data->len;
2447 /* If storing requires more than two move insns,
2448 copy addresses to registers (to make displacements shorter)
2449 and use post-increment if available. */
2450 if (!data->autinc_to
2451 && move_by_pieces_ninsns (data->len, align) > 2)
2453 /* Determine the main mode we'll be using. */
2454 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2455 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2456 if (GET_MODE_SIZE (tmode) < max_size)
2459 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2461 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2462 data->autinc_to = 1;
2463 data->explicit_inc_to = -1;
2466 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2467 && ! data->autinc_to)
2469 data->to_addr = copy_addr_to_reg (to_addr);
2470 data->autinc_to = 1;
2471 data->explicit_inc_to = 1;
2474 if (!data->autinc_to && CONSTANT_P (to_addr))
2475 data->to_addr = copy_addr_to_reg (to_addr);
2478 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2479 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2480 align = MOVE_MAX * BITS_PER_UNIT;
2482 /* First store what we can in the largest integer mode, then go to
2483 successively smaller modes. */
2485 while (max_size > 1)
2487 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2488 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2489 if (GET_MODE_SIZE (tmode) < max_size)
2492 if (mode == VOIDmode)
2495 icode = mov_optab->handlers[(int) mode].insn_code;
2496 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2497 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2499 max_size = GET_MODE_SIZE (mode);
2502 /* The code above should have handled everything. */
2507 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2508 with move instructions for mode MODE. GENFUN is the gen_... function
2509 to make a move insn for that mode. DATA has all the other info. */
2512 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2513 struct store_by_pieces *data)
2515 unsigned int size = GET_MODE_SIZE (mode);
2518 while (data->len >= size)
2521 data->offset -= size;
2523 if (data->autinc_to)
2524 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2527 to1 = adjust_address (data->to, mode, data->offset);
2529 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2530 emit_insn (gen_add2_insn (data->to_addr,
2531 GEN_INT (-(HOST_WIDE_INT) size)));
2533 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2534 emit_insn ((*genfun) (to1, cst));
2536 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2537 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2539 if (! data->reverse)
2540 data->offset += size;
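/* Editorial worked example: asked to store 7 bytes at 32-bit alignment on
   a 32-bit target, store_by_pieces_1 first selects SImode, and
   store_by_pieces_2 emits one SImode store (4 bytes, 3 left); the loop
   then drops to HImode (one store, 1 left) and finally QImode, for three
   stores in total.  */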
2546 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2547 its length in bytes. */
2550 clear_storage (rtx object, rtx size)
2553 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2554 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2556 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2557 just move a zero. Otherwise, do this a piece at a time. */
2558 if (GET_MODE (object) != BLKmode
2559 && GET_CODE (size) == CONST_INT
2560 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
2561 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2564 object = protect_from_queue (object, 1);
2565 size = protect_from_queue (size, 0);
2567 if (size == const0_rtx)
2569 else if (GET_CODE (size) == CONST_INT
2570 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2571 clear_by_pieces (object, INTVAL (size), align);
2572 else if (clear_storage_via_clrstr (object, size, align))
2575 retval = clear_storage_via_libcall (object, size);
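/* Editorial sketch (not part of the original source): zeroing a 32-byte
   BLKmode object.  Depending on the size and on target support this
   becomes piecewise stores, a clrstr insn, or the library call expanded
   below.  The helper name is hypothetical.  */
#if 0
static void
example_clear_buffer (rtx blk_mem)
{
  clear_storage (blk_mem, GEN_INT (32));
}
#endif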
2581 /* A subroutine of clear_storage. Expand a clrstr pattern;
2582 return true if successful. */
2585 clear_storage_via_clrstr (rtx object, rtx size, unsigned int align)
2587 /* Try the most limited insn first, because there's no point
2588 including more than one in the machine description unless
2589 the more limited one has some advantage. */
2591 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2592 enum machine_mode mode;
2594 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2595 mode = GET_MODE_WIDER_MODE (mode))
2597 enum insn_code code = clrstr_optab[(int) mode];
2598 insn_operand_predicate_fn pred;
2600 if (code != CODE_FOR_nothing
2601 /* We don't need MODE to be narrower than
2602 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2603 the mode mask, as it is returned by the macro, it will
2604 definitely be less than the actual mode mask. */
2605 && ((GET_CODE (size) == CONST_INT
2606 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2607 <= (GET_MODE_MASK (mode) >> 1)))
2608 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2609 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2610 || (*pred) (object, BLKmode))
2611 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2612 || (*pred) (opalign, VOIDmode)))
2615 rtx last = get_last_insn ();
2618 op1 = convert_to_mode (mode, size, 1);
2619 pred = insn_data[(int) code].operand[1].predicate;
2620 if (pred != 0 && ! (*pred) (op1, mode))
2621 op1 = copy_to_mode_reg (mode, op1);
2623 pat = GEN_FCN ((int) code) (object, op1, opalign);
2630 delete_insns_since (last);
2637 /* A subroutine of clear_storage. Expand a call to memset or bzero.
2638 Return the return value of memset, 0 otherwise. */
2641 clear_storage_via_libcall (rtx object, rtx size)
2643 tree call_expr, arg_list, fn, object_tree, size_tree;
2644 enum machine_mode size_mode;
2647 /* OBJECT or SIZE may have been passed through protect_from_queue.
2649 It is unsafe to save the value generated by protect_from_queue
2650 and reuse it later. Consider what happens if emit_queue is
2651 called before the return value from protect_from_queue is used.
2653 Expansion of the CALL_EXPR below will call emit_queue before
2654 we are finished emitting RTL for argument setup. So if we are
2655 not careful we could get the wrong value for an argument.
2657 To avoid this problem we go ahead and emit code to copy OBJECT
2658 and SIZE into new pseudos. We can then place those new pseudos
2659 into an RTL_EXPR and use them later, even after a call to emit_queue.
2662 Note this is not strictly needed for library calls since they
2663 do not call emit_queue before loading their arguments. However,
2664 we may need to have library calls call emit_queue in the future
2665 since failing to do so could cause problems for targets which
2666 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2668 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2670 if (TARGET_MEM_FUNCTIONS)
2671 size_mode = TYPE_MODE (sizetype);
2673 size_mode = TYPE_MODE (unsigned_type_node);
2674 size = convert_to_mode (size_mode, size, 1);
2675 size = copy_to_mode_reg (size_mode, size);
2677 /* It is incorrect to use the libcall calling conventions to call
2678 memset in this context. This could be a user call to memset and
2679 the user may wish to examine the return value from memset. For
2680 targets where libcalls and normal calls have different conventions
2681 for returning pointers, we could end up generating incorrect code.
2683 For convenience, we generate the call to bzero this way as well. */
2685 object_tree = make_tree (ptr_type_node, object);
2686 if (TARGET_MEM_FUNCTIONS)
2687 size_tree = make_tree (sizetype, size);
2689 size_tree = make_tree (unsigned_type_node, size);
2691 fn = clear_storage_libcall_fn (true);
2692 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2693 if (TARGET_MEM_FUNCTIONS)
2694 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
2695 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2697 /* Now we have to build up the CALL_EXPR itself. */
2698 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2699 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2700 call_expr, arg_list, NULL_TREE);
2702 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2704 /* If we are initializing a readonly value, show the above call
2705 clobbered it. Otherwise, a load from it may erroneously be
2706 hoisted from a loop. */
2707 if (RTX_UNCHANGING_P (object))
2708 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
2710 return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
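/* Editorial note: at the source level the call built above is the
   equivalent of `memset (object, 0, size)' when TARGET_MEM_FUNCTIONS and
   of `bzero (object, size)' otherwise; only memset's return value is
   propagated to the caller.  */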
2713 /* A subroutine of clear_storage_via_libcall. Create the tree node
2714 for the function we use for block clears. The first time FOR_CALL
2715 is true, we call assemble_external. */
2717 static GTY(()) tree block_clear_fn;
2720 init_block_clear_fn (const char *asmspec)
2722 if (!block_clear_fn)
2726 if (TARGET_MEM_FUNCTIONS)
2728 fn = get_identifier ("memset");
2729 args = build_function_type_list (ptr_type_node, ptr_type_node,
2730 integer_type_node, sizetype,
2735 fn = get_identifier ("bzero");
2736 args = build_function_type_list (void_type_node, ptr_type_node,
2737 unsigned_type_node, NULL_TREE);
2740 fn = build_decl (FUNCTION_DECL, fn, args);
2741 DECL_EXTERNAL (fn) = 1;
2742 TREE_PUBLIC (fn) = 1;
2743 DECL_ARTIFICIAL (fn) = 1;
2744 TREE_NOTHROW (fn) = 1;
2746 block_clear_fn = fn;
2751 SET_DECL_RTL (block_clear_fn, NULL_RTX);
2752 SET_DECL_ASSEMBLER_NAME (block_clear_fn, get_identifier (asmspec));
2757 clear_storage_libcall_fn (int for_call)
2759 static bool emitted_extern;
2761 if (!block_clear_fn)
2762 init_block_clear_fn (NULL);
2764 if (for_call && !emitted_extern)
2766 emitted_extern = true;
2767 make_decl_rtl (block_clear_fn, NULL);
2768 assemble_external (block_clear_fn);
2771 return block_clear_fn;
2774 /* Generate code to copy Y into X.
2775 Both Y and X must have the same mode, except that
2776 Y can be a constant with VOIDmode.
2777 This mode cannot be BLKmode; use emit_block_move for that.
2779 Return the last instruction emitted. */
2782 emit_move_insn (rtx x, rtx y)
2784 enum machine_mode mode = GET_MODE (x);
2785 rtx y_cst = NULL_RTX;
2788 x = protect_from_queue (x, 1);
2789 y = protect_from_queue (y, 0);
2791 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2794 /* Never force constant_p_rtx to memory. */
2795 if (GET_CODE (y) == CONSTANT_P_RTX)
2797 else if (CONSTANT_P (y))
2800 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
2801 && (last_insn = compress_float_constant (x, y)))
2806 if (!LEGITIMATE_CONSTANT_P (y))
2808 y = force_const_mem (mode, y);
2810 /* If the target's cannot_force_const_mem prevented the spill,
2811 assume that the target's move expanders will also take care
2812 of the non-legitimate constant. */
2818 /* If X or Y are memory references, verify that their addresses are valid for the machine. */
2820 if (GET_CODE (x) == MEM
2821 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2822 && ! push_operand (x, GET_MODE (x)))
2824 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2825 x = validize_mem (x);
2827 if (GET_CODE (y) == MEM
2828 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2830 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2831 y = validize_mem (y);
2833 if (mode == BLKmode)
2836 last_insn = emit_move_insn_1 (x, y);
2838 if (y_cst && GET_CODE (x) == REG
2839 && (set = single_set (last_insn)) != NULL_RTX
2840 && SET_DEST (set) == x
2841 && ! rtx_equal_p (y_cst, SET_SRC (set)))
2842 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
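/* Editorial sketch (not part of the original source): the common case of
   loading an integer constant into a fresh pseudo.  The helper name is
   hypothetical.  */
#if 0
static rtx
example_load_constant (void)
{
  rtx reg = gen_reg_rtx (SImode);

  /* The constant has VOIDmode; emit_move_insn forces it to memory first
     if it is not a legitimate immediate for the target.  */
  emit_move_insn (reg, GEN_INT (42));
  return reg;
}
#endif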
2847 /* Low level part of emit_move_insn.
2848 Called just like emit_move_insn, but assumes X and Y
2849 are basically valid. */
2852 emit_move_insn_1 (rtx x, rtx y)
2854 enum machine_mode mode = GET_MODE (x);
2855 enum machine_mode submode;
2856 enum mode_class class = GET_MODE_CLASS (mode);
2858 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2861 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2863 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2865 /* Expand complex moves by moving real part and imag part, if possible. */
2866 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2867 && BLKmode != (submode = GET_MODE_INNER (mode))
2868 && (mov_optab->handlers[(int) submode].insn_code
2869 != CODE_FOR_nothing))
2871 /* Don't split destination if it is a stack push. */
2872 int stack = push_operand (x, GET_MODE (x));
2874 #ifdef PUSH_ROUNDING
2875 /* In case we output to the stack, but the size is smaller than what the
2876 machine can push exactly, we need to use move instructions. */
2878 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
2879 != GET_MODE_SIZE (submode)))
2882 HOST_WIDE_INT offset1, offset2;
2884 /* Do not use anti_adjust_stack, since we don't want to update
2885 stack_pointer_delta. */
2886 temp = expand_binop (Pmode,
2887 #ifdef STACK_GROWS_DOWNWARD
2895 (GET_MODE_SIZE (GET_MODE (x)))),
2896 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2898 if (temp != stack_pointer_rtx)
2899 emit_move_insn (stack_pointer_rtx, temp);
2901 #ifdef STACK_GROWS_DOWNWARD
2903 offset2 = GET_MODE_SIZE (submode);
2905 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2906 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2907 + GET_MODE_SIZE (submode));
2910 emit_move_insn (change_address (x, submode,
2911 gen_rtx_PLUS (Pmode,
2913 GEN_INT (offset1))),
2914 gen_realpart (submode, y));
2915 emit_move_insn (change_address (x, submode,
2916 gen_rtx_PLUS (Pmode,
2918 GEN_INT (offset2))),
2919 gen_imagpart (submode, y));
2923 /* If this is a stack push, push the highpart first, so it
2924 will be in the argument order.
2926 In that case, change_address is used only to convert
2927 the mode, not to change the address. */
2930 /* Note that the real part always precedes the imag part in memory
2931 regardless of machine's endianness. */
2932 #ifdef STACK_GROWS_DOWNWARD
2933 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2934 gen_imagpart (submode, y));
2935 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2936 gen_realpart (submode, y));
2938 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2939 gen_realpart (submode, y));
2940 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2941 gen_imagpart (submode, y));
2946 rtx realpart_x, realpart_y;
2947 rtx imagpart_x, imagpart_y;
2949 /* If this is a complex value with each part being smaller than a
2950 word, the usual calling sequence will likely pack the pieces into
2951 a single register. Unfortunately, SUBREG of hard registers only
2952 deals in terms of words, so we have a problem converting input
2953 arguments to the CONCAT of two registers that is used elsewhere
2954 for complex values. If this is before reload, we can copy it into
2955 memory and reload. FIXME, we should see about using extract and
2956 insert on integer registers, but complex short and complex char
2957 variables should be rarely used. */
2958 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2959 && (reload_in_progress | reload_completed) == 0)
2962 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2964 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2966 if (packed_dest_p || packed_src_p)
2968 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2969 ? MODE_FLOAT : MODE_INT);
2971 enum machine_mode reg_mode
2972 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2974 if (reg_mode != BLKmode)
2976 rtx mem = assign_stack_temp (reg_mode,
2977 GET_MODE_SIZE (mode), 0);
2978 rtx cmem = adjust_address (mem, mode, 0);
2981 = N_("function using short complex types cannot be inline");
2985 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2987 emit_move_insn_1 (cmem, y);
2988 return emit_move_insn_1 (sreg, mem);
2992 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2994 emit_move_insn_1 (mem, sreg);
2995 return emit_move_insn_1 (x, cmem);
3001 realpart_x = gen_realpart (submode, x);
3002 realpart_y = gen_realpart (submode, y);
3003 imagpart_x = gen_imagpart (submode, x);
3004 imagpart_y = gen_imagpart (submode, y);
3006 /* Show the output dies here. This is necessary for SUBREGs
3007 of pseudos since we cannot track their lifetimes correctly;
3008 hard regs shouldn't appear here except as return values.
3009 We never want to emit such a clobber after reload. */
3011 && ! (reload_in_progress || reload_completed)
3012 && (GET_CODE (realpart_x) == SUBREG
3013 || GET_CODE (imagpart_x) == SUBREG))
3014 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3016 emit_move_insn (realpart_x, realpart_y);
3017 emit_move_insn (imagpart_x, imagpart_y);
3020 return get_last_insn ();
3023 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
3024 find a mode to do it in. If we have a movcc, use it. Otherwise,
3025 find the MODE_INT mode of the same width. */
3026 else if (GET_MODE_CLASS (mode) == MODE_CC
3027 && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
3029 enum insn_code insn_code;
3030 enum machine_mode tmode = VOIDmode;
3034 && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
3037 for (tmode = QImode; tmode != VOIDmode;
3038 tmode = GET_MODE_WIDER_MODE (tmode))
3039 if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
3042 if (tmode == VOIDmode)
3045 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
3046 may call change_address which is not appropriate if we were
3047 called when a reload was in progress. We don't have to worry
3048 about changing the address since the size in bytes is supposed to
3049 be the same. Copy the MEM to change the mode and move any
3050 substitutions from the old MEM to the new one. */
3052 if (reload_in_progress)
3054 x = gen_lowpart_common (tmode, x1);
3055 if (x == 0 && GET_CODE (x1) == MEM)
3057 x = adjust_address_nv (x1, tmode, 0);
3058 copy_replacements (x1, x);
3061 y = gen_lowpart_common (tmode, y1);
3062 if (y == 0 && GET_CODE (y1) == MEM)
3064 y = adjust_address_nv (y1, tmode, 0);
3065 copy_replacements (y1, y);
3070 x = gen_lowpart (tmode, x);
3071 y = gen_lowpart (tmode, y);
3074 insn_code = mov_optab->handlers[(int) tmode].insn_code;
3075 return emit_insn (GEN_FCN (insn_code) (x, y));
3078 /* Try using a move pattern for the corresponding integer mode. This is
3079 only safe when simplify_subreg can convert MODE constants into integer
3080 constants. At present, it can only do this reliably if the value
3081 fits within a HOST_WIDE_INT. */
3082 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3083 && (submode = int_mode_for_mode (mode)) != BLKmode
3084 && mov_optab->handlers[submode].insn_code != CODE_FOR_nothing)
3085 return emit_insn (GEN_FCN (mov_optab->handlers[submode].insn_code)
3086 (simplify_gen_subreg (submode, x, mode, 0),
3087 simplify_gen_subreg (submode, y, mode, 0)));
3089 /* This will handle any multi-word or full-word mode that lacks a move_insn
3090 pattern. However, you will get better code if you define such patterns,
3091 even if they must turn into multiple assembler instructions. */
3092 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
3099 #ifdef PUSH_ROUNDING
3101 /* If X is a push on the stack, do the push now and replace
3102 X with a reference to the stack pointer. */
3103 if (push_operand (x, GET_MODE (x)))
3108 /* Do not use anti_adjust_stack, since we don't want to update
3109 stack_pointer_delta. */
3110 temp = expand_binop (Pmode,
3111 #ifdef STACK_GROWS_DOWNWARD
3119 (GET_MODE_SIZE (GET_MODE (x)))),
3120 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3122 if (temp != stack_pointer_rtx)
3123 emit_move_insn (stack_pointer_rtx, temp);
3125 code = GET_CODE (XEXP (x, 0));
3127 /* Just hope that small offsets off SP are OK. */
3128 if (code == POST_INC)
3129 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3130 GEN_INT (-((HOST_WIDE_INT)
3131 GET_MODE_SIZE (GET_MODE (x)))));
3132 else if (code == POST_DEC)
3133 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3134 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3136 temp = stack_pointer_rtx;
3138 x = change_address (x, VOIDmode, temp);
3142 /* If we are in reload, see if either operand is a MEM whose address
3143 is scheduled for replacement. */
3144 if (reload_in_progress && GET_CODE (x) == MEM
3145 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3146 x = replace_equiv_address_nv (x, inner);
3147 if (reload_in_progress && GET_CODE (y) == MEM
3148 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3149 y = replace_equiv_address_nv (y, inner);
3155 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3158 rtx xpart = operand_subword (x, i, 1, mode);
3159 rtx ypart = operand_subword (y, i, 1, mode);
3161 /* If we can't get a part of Y, put Y into memory if it is a
3162 constant. Otherwise, force it into a register. If we still
3163 can't get a part of Y, abort. */
3164 if (ypart == 0 && CONSTANT_P (y))
3166 y = force_const_mem (mode, y);
3167 ypart = operand_subword (y, i, 1, mode);
3169 else if (ypart == 0)
3170 ypart = operand_subword_force (y, i, mode);
3172 if (xpart == 0 || ypart == 0)
3175 need_clobber |= (GET_CODE (xpart) == SUBREG);
3177 last_insn = emit_move_insn (xpart, ypart);
3183 /* Show the output dies here. This is necessary for SUBREGs
3184 of pseudos since we cannot track their lifetimes correctly;
3185 hard regs shouldn't appear here except as return values.
3186 We never want to emit such a clobber after reload. */
3188 && ! (reload_in_progress || reload_completed)
3189 && need_clobber != 0)
3190 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3200 /* If Y is representable exactly in a narrower mode, and the target can
3201 perform the extension directly from constant or memory, then emit the
3202 move as an extension. */
3205 compress_float_constant (rtx x, rtx y)
3207 enum machine_mode dstmode = GET_MODE (x);
3208 enum machine_mode orig_srcmode = GET_MODE (y);
3209 enum machine_mode srcmode;
3212 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3214 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3215 srcmode != orig_srcmode;
3216 srcmode = GET_MODE_WIDER_MODE (srcmode))
3219 rtx trunc_y, last_insn;
3221 /* Skip if the target can't extend this way. */
3222 ic = can_extend_p (dstmode, srcmode, 0);
3223 if (ic == CODE_FOR_nothing)
3226 /* Skip if the narrowed value isn't exact. */
3227 if (! exact_real_truncate (srcmode, &r))
3230 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3232 if (LEGITIMATE_CONSTANT_P (trunc_y))
3234 /* Skip if the target needs extra instructions to perform the extension. */
3236 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3239 else if (float_extend_from_mem[dstmode][srcmode])
3240 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3244 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3245 last_insn = get_last_insn ();
3247 if (GET_CODE (x) == REG)
3248 set_unique_reg_note (last_insn, REG_EQUAL, y);
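/* Editorial worked example: moving the DFmode constant 1.5 into a
   register.  1.5 truncates exactly to SFmode, so when the target can
   extend SFmode to DFmode directly from a constant or from memory, the
   move is emitted as that extension instead of materializing a full
   DFmode constant, and the REG_EQUAL note above still records the
   original DFmode value.  */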
3256 /* Pushing data onto the stack. */
3258 /* Push a block of length SIZE (perhaps variable)
3259 and return an rtx to address the beginning of the block.
3260 Note that it is not possible for the value returned to be a QUEUED.
3261 The value may be virtual_outgoing_args_rtx.
3263 EXTRA is the number of bytes of padding to push in addition to SIZE.
3264 BELOW nonzero means this padding comes at low addresses;
3265 otherwise, the padding comes at high addresses. */
3268 push_block (rtx size, int extra, int below)
3272 size = convert_modes (Pmode, ptr_mode, size, 1);
3273 if (CONSTANT_P (size))
3274 anti_adjust_stack (plus_constant (size, extra));
3275 else if (GET_CODE (size) == REG && extra == 0)
3276 anti_adjust_stack (size);
3279 temp = copy_to_mode_reg (Pmode, size);
3281 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3282 temp, 0, OPTAB_LIB_WIDEN);
3283 anti_adjust_stack (temp);
3286 #ifndef STACK_GROWS_DOWNWARD
3292 temp = virtual_outgoing_args_rtx;
3293 if (extra != 0 && below)
3294 temp = plus_constant (temp, extra);
3298 if (GET_CODE (size) == CONST_INT)
3299 temp = plus_constant (virtual_outgoing_args_rtx,
3300 -INTVAL (size) - (below ? 0 : extra));
3301 else if (extra != 0 && !below)
3302 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3303 negate_rtx (Pmode, plus_constant (size, extra)));
3305 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3306 negate_rtx (Pmode, size));
3309 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
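/* Editorial sketch (not part of the original source): reserving a
   16-byte block on the stack and wrapping the returned address in a
   BLKmode MEM.  The helper name is hypothetical.  */
#if 0
static rtx
example_push_block (void)
{
  /* With EXTRA == 0 the BELOW argument is irrelevant.  */
  rtx addr = push_block (GEN_INT (16), 0, 0);
  return gen_rtx_MEM (BLKmode, addr);
}
#endif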
3312 #ifdef PUSH_ROUNDING
3314 /* Emit a single push insn. */
3317 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3320 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3322 enum insn_code icode;
3323 insn_operand_predicate_fn pred;
3325 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3326 /* If there is a push pattern, use it. Otherwise try the old way of throwing
3327 a MEM representing the push operation to the move expander. */
3328 icode = push_optab->handlers[(int) mode].insn_code;
3329 if (icode != CODE_FOR_nothing)
3331 if (((pred = insn_data[(int) icode].operand[0].predicate)
3332 && !((*pred) (x, mode))))
3333 x = force_reg (mode, x);
3334 emit_insn (GEN_FCN (icode) (x));
3337 if (GET_MODE_SIZE (mode) == rounded_size)
3338 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3339 /* If we are to pad downward, adjust the stack pointer first and
3340 then store X into the stack location using an offset. This is
3341 because emit_move_insn does not know how to pad; it does not have such information. */
3343 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3345 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3346 HOST_WIDE_INT offset;
3348 emit_move_insn (stack_pointer_rtx,
3349 expand_binop (Pmode,
3350 #ifdef STACK_GROWS_DOWNWARD
3356 GEN_INT (rounded_size),
3357 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3359 offset = (HOST_WIDE_INT) padding_size;
3360 #ifdef STACK_GROWS_DOWNWARD
3361 if (STACK_PUSH_CODE == POST_DEC)
3362 /* We have already decremented the stack pointer, so get the previous value. */
3364 offset += (HOST_WIDE_INT) rounded_size;
3366 if (STACK_PUSH_CODE == POST_INC)
3367 /* We have already incremented the stack pointer, so get the previous value. */
3369 offset -= (HOST_WIDE_INT) rounded_size;
3371 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3375 #ifdef STACK_GROWS_DOWNWARD
3376 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3377 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3378 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3380 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3381 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3382 GEN_INT (rounded_size));
3384 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3387 dest = gen_rtx_MEM (mode, dest_addr);
3391 set_mem_attributes (dest, type, 1);
3393 if (flag_optimize_sibling_calls)
3394 /* Function incoming arguments may overlap with sibling call
3395 outgoing arguments and we cannot allow reordering of reads
3396 from function arguments with stores to outgoing arguments
3397 of sibling calls. */
3398 set_mem_alias_set (dest, 0);
3400 emit_move_insn (dest, x);
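/* Editorial sketch (not part of the original source): pushing one SImode
   value.  A null TYPE is acceptable, as in the word-by-word path of
   emit_push_insn below; the helper name is hypothetical.  */
#if 0
static void
example_push_word (rtx value)
{
  emit_single_push_insn (SImode, value, NULL_TREE);
}
#endif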
3404 /* Generate code to push X onto the stack, assuming it has mode MODE and type TYPE.
3406 MODE is redundant except when X is a CONST_INT (since they don't carry mode info).
3408 SIZE is an rtx for the size of data to be copied (in bytes),
3409 needed only if X is BLKmode.
3411 ALIGN (in bits) is maximum alignment we can assume.
3413 If PARTIAL and REG are both nonzero, then copy that many of the first
3414 words of X into registers starting with REG, and push the rest of X.
3415 The amount of space pushed is decreased by PARTIAL words,
3416 rounded *down* to a multiple of PARM_BOUNDARY.
3417 REG must be a hard register in this case.
3418 If REG is zero but PARTIAL is not, take all other actions for an
3419 argument partially in registers, but do not actually load any registers.
3422 EXTRA is the amount in bytes of extra space to leave next to this arg.
3423 This is ignored if an argument block has already been allocated.
3425 On a machine that lacks real push insns, ARGS_ADDR is the address of
3426 the bottom of the argument block for this call. We use indexing off there
3427 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3428 argument block has not been preallocated.
3430 ARGS_SO_FAR is the size of args previously pushed for this call.
3432 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3433 for arguments passed in registers. If nonzero, it will be the number
3434 of bytes required. */
3437 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3438 unsigned int align, int partial, rtx reg, int extra,
3439 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3443 enum direction stack_direction
3444 #ifdef STACK_GROWS_DOWNWARD
3450 /* Decide where to pad the argument: `downward' for below,
3451 `upward' for above, or `none' for don't pad it.
3452 Default is below for small data on big-endian machines; else above. */
3453 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3455 /* Invert direction if stack is post-decrement. */
3457 if (STACK_PUSH_CODE == POST_DEC)
3458 if (where_pad != none)
3459 where_pad = (where_pad == downward ? upward : downward);
3461 xinner = x = protect_from_queue (x, 0);
3463 if (mode == BLKmode)
3465 /* Copy a block into the stack, entirely or partially. */
3468 int used = partial * UNITS_PER_WORD;
3469 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3477 /* USED is now the # of bytes we need not copy to the stack
3478 because registers will take care of them. */
3481 xinner = adjust_address (xinner, BLKmode, used);
3483 /* If the partial register-part of the arg counts in its stack size,
3484 skip the part of stack space corresponding to the registers.
3485 Otherwise, start copying to the beginning of the stack space,
3486 by setting SKIP to 0. */
3487 skip = (reg_parm_stack_space == 0) ? 0 : used;
3489 #ifdef PUSH_ROUNDING
3490 /* Do it with several push insns if that doesn't take lots of insns
3491 and if there is no difficulty with push insns that skip bytes
3492 on the stack for alignment purposes. */
3495 && GET_CODE (size) == CONST_INT
3497 && MEM_ALIGN (xinner) >= align
3498 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3499 /* Here we avoid the case of a structure whose weak alignment
3500 forces many pushes of a small amount of data,
3501 and such small pushes do rounding that causes trouble. */
3502 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3503 || align >= BIGGEST_ALIGNMENT
3504 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3505 == (align / BITS_PER_UNIT)))
3506 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3508 /* Push padding now if padding above and stack grows down,
3509 or if padding below and stack grows up.
3510 But if space already allocated, this has already been done. */
3511 if (extra && args_addr == 0
3512 && where_pad != none && where_pad != stack_direction)
3513 anti_adjust_stack (GEN_INT (extra));
3515 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3518 #endif /* PUSH_ROUNDING */
3522 /* Otherwise make space on the stack and copy the data
3523 to the address of that space. */
3525 /* Deduct words put into registers from the size we must copy. */
3528 if (GET_CODE (size) == CONST_INT)
3529 size = GEN_INT (INTVAL (size) - used);
3531 size = expand_binop (GET_MODE (size), sub_optab, size,
3532 GEN_INT (used), NULL_RTX, 0,
3536 /* Get the address of the stack space.
3537 In this case, we do not deal with EXTRA separately.
3538 A single stack adjust will do. */
3541 temp = push_block (size, extra, where_pad == downward);
3544 else if (GET_CODE (args_so_far) == CONST_INT)
3545 temp = memory_address (BLKmode,
3546 plus_constant (args_addr,
3547 skip + INTVAL (args_so_far)));
3549 temp = memory_address (BLKmode,
3550 plus_constant (gen_rtx_PLUS (Pmode,
3555 if (!ACCUMULATE_OUTGOING_ARGS)
3557 /* If the source is referenced relative to the stack pointer,
3558 copy it to another register to stabilize it. We do not need
3559 to do this if we know that we won't be changing sp. */
3561 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3562 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3563 temp = copy_to_reg (temp);
3566 target = gen_rtx_MEM (BLKmode, temp);
3570 set_mem_attributes (target, type, 1);
3571 /* Function incoming arguments may overlap with sibling call
3572 outgoing arguments and we cannot allow reordering of reads
3573 from function arguments with stores to outgoing arguments
3574 of sibling calls. */
3575 set_mem_alias_set (target, 0);
3578 /* ALIGN may well be better aligned than TYPE, e.g. due to
3579 PARM_BOUNDARY. Assume the caller isn't lying. */
3580 set_mem_align (target, align);
3582 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3585 else if (partial > 0)
3587 /* Scalar partly in registers. */
3589 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3592 /* # words of start of argument
3593 that we must make space for but need not store. */
3594 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3595 int args_offset = INTVAL (args_so_far);
3598 /* Push padding now if padding above and stack grows down,
3599 or if padding below and stack grows up.
3600 But if space already allocated, this has already been done. */
3601 if (extra && args_addr == 0
3602 && where_pad != none && where_pad != stack_direction)
3603 anti_adjust_stack (GEN_INT (extra));
3605 /* If we make space by pushing it, we might as well push
3606 the real data. Otherwise, we can leave OFFSET nonzero
3607 and leave the space uninitialized. */
3611 /* Now NOT_STACK gets the number of words that we don't need to
3612 allocate on the stack. */
3613 not_stack = partial - offset;
3615 /* If the partial register-part of the arg counts in its stack size,
3616 skip the part of stack space corresponding to the registers.
3617 Otherwise, start copying to the beginning of the stack space,
3618 by setting SKIP to 0. */
3619 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3621 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3622 x = validize_mem (force_const_mem (mode, x));
3624 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3625 SUBREGs of such registers are not allowed. */
3626 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3627 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3628 x = copy_to_reg (x);
3630 /* Loop over all the words allocated on the stack for this arg. */
3631 /* We can do it by words, because any scalar bigger than a word
3632 has a size that is a multiple of a word. */
3633 #ifndef PUSH_ARGS_REVERSED
3634 for (i = not_stack; i < size; i++)
3636 for (i = size - 1; i >= not_stack; i--)
3638 if (i >= not_stack + offset)
3639 emit_push_insn (operand_subword_force (x, i, mode),
3640 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3642 GEN_INT (args_offset + ((i - not_stack + skip)
3644 reg_parm_stack_space, alignment_pad);
3651 /* Push padding now if padding above and stack grows down,
3652 or if padding below and stack grows up.
3653 But if space already allocated, this has already been done. */
3654 if (extra && args_addr == 0
3655 && where_pad != none && where_pad != stack_direction)
3656 anti_adjust_stack (GEN_INT (extra));
3658 #ifdef PUSH_ROUNDING
3659 if (args_addr == 0 && PUSH_ARGS)
3660 emit_single_push_insn (mode, x, type);
3664 if (GET_CODE (args_so_far) == CONST_INT)
3666 = memory_address (mode,
3667 plus_constant (args_addr,
3668 INTVAL (args_so_far)));
3670 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3672 dest = gen_rtx_MEM (mode, addr);
3675 set_mem_attributes (dest, type, 1);
3676 /* Function incoming arguments may overlap with sibling call
3677 outgoing arguments and we cannot allow reordering of reads
3678 from function arguments with stores to outgoing arguments
3679 of sibling calls. */
3680 set_mem_alias_set (dest, 0);
3683 emit_move_insn (dest, x);
3687 /* If part should go in registers, copy that part
3688 into the appropriate registers. Do this now, at the end,
3689 since mem-to-mem copies above may do function calls. */
3690 if (partial > 0 && reg != 0)
3692 /* Handle calls that pass values in multiple non-contiguous locations.
3693 The Irix 6 ABI has examples of this. */
3694 if (GET_CODE (reg) == PARALLEL)
3695 emit_group_load (reg, x, type, -1);
3697 move_block_to_reg (REGNO (reg), x, partial, mode);
3700 if (extra && args_addr == 0 && where_pad == stack_direction)
3701 anti_adjust_stack (GEN_INT (extra));
3703 if (alignment_pad && args_addr == 0)
3704 anti_adjust_stack (alignment_pad);
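/* Editorial worked example: with PARTIAL == 2 on a 32-bit target the
   first two words of X are later copied into registers starting at REG,
   the stack space consumed is decreased by two words (rounded down to a
   multiple of PARM_BOUNDARY), and only the remainder of X is actually
   pushed.  */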
3707 /* Return X if X can be used as a subtarget in a sequence of arithmetic operations. */
3711 get_subtarget (rtx x)
3714 /* Only registers can be subtargets. */
3715 || GET_CODE (x) != REG
3716 /* If the register is readonly, it can't be set more than once. */
3717 || RTX_UNCHANGING_P (x)
3718 /* Don't use hard regs to avoid extending their life. */
3719 || REGNO (x) < FIRST_PSEUDO_REGISTER
3720 /* Avoid subtargets inside loops,
3721 since they hide some invariant expressions. */
3722 || preserve_subexpressions_p ())
3726 /* Expand an assignment that stores the value of FROM into TO.
3727 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3728 (This may contain a QUEUED rtx;
3729 if the value is constant, this rtx is a constant.)
3730 Otherwise, the returned value is NULL_RTX. */
3733 expand_assignment (tree to, tree from, int want_value)
3738 /* Don't crash if the lhs of the assignment was erroneous. */
3740 if (TREE_CODE (to) == ERROR_MARK)
3742 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3743 return want_value ? result : NULL_RTX;
3746 /* Assignment of a structure component needs special treatment
3747 if the structure component's rtx is not simply a MEM.
3748 Assignment of an array element at a constant index, and assignment of
3749 an array element in an unaligned packed structure field, have the same problem. */
3752 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3753 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
3754 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
3756 enum machine_mode mode1;
3757 HOST_WIDE_INT bitsize, bitpos;
3765 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3766 &unsignedp, &volatilep);
3768 /* If we are going to use store_bit_field and extract_bit_field,
3769 make sure to_rtx will be safe for multiple use. */
3771 if (mode1 == VOIDmode && want_value)
3772 tem = stabilize_reference (tem);
3774 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3778 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
3780 if (GET_CODE (to_rtx) != MEM)
3783 #ifdef POINTERS_EXTEND_UNSIGNED
3784 if (GET_MODE (offset_rtx) != Pmode)
3785 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
3787 if (GET_MODE (offset_rtx) != ptr_mode)
3788 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3791 /* A constant address in TO_RTX can have VOIDmode; we must not try
3792 to call force_reg for that case. Avoid that case. */
3793 if (GET_CODE (to_rtx) == MEM
3794 && GET_MODE (to_rtx) == BLKmode
3795 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3797 && (bitpos % bitsize) == 0
3798 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3799 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3801 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3805 to_rtx = offset_address (to_rtx, offset_rtx,
3806 highest_pow2_factor_for_type (TREE_TYPE (to),
3810 if (GET_CODE (to_rtx) == MEM)
3812 /* If the field is at offset zero, we could have been given the
3813 DECL_RTX of the parent struct. Don't munge it. */
3814 to_rtx = shallow_copy_rtx (to_rtx);
3816 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
3819 /* Deal with volatile and readonly fields. The former is only done
3820 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3821 if (volatilep && GET_CODE (to_rtx) == MEM)
3823 if (to_rtx == orig_to_rtx)
3824 to_rtx = copy_rtx (to_rtx);
3825 MEM_VOLATILE_P (to_rtx) = 1;
3828 if (TREE_CODE (to) == COMPONENT_REF
3829 && TREE_READONLY (TREE_OPERAND (to, 1))
3830 /* We can't assert that a MEM won't be set more than once
3831 if the component is not addressable because another
3832 non-addressable component may be referenced by the same MEM. */
3833 && ! (GET_CODE (to_rtx) == MEM && ! can_address_p (to)))
3835 if (to_rtx == orig_to_rtx)
3836 to_rtx = copy_rtx (to_rtx);
3837 RTX_UNCHANGING_P (to_rtx) = 1;
3840 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
3842 if (to_rtx == orig_to_rtx)
3843 to_rtx = copy_rtx (to_rtx);
3844 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3847 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3849 /* Spurious cast for HPUX compiler. */
3850 ? ((enum machine_mode)
3851 TYPE_MODE (TREE_TYPE (to)))
3853 unsignedp, TREE_TYPE (tem), get_alias_set (to));
3855 preserve_temp_slots (result);
3859 /* If the value is meaningful, convert RESULT to the proper mode.
3860 Otherwise, return nothing. */
3861 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3862 TYPE_MODE (TREE_TYPE (from)),
3864 TREE_UNSIGNED (TREE_TYPE (to)))
3868 /* If the rhs is a function call and its value is not an aggregate,
3869 call the function before we start to compute the lhs.
3870 This is needed for correct code for cases such as
3871 val = setjmp (buf) on machines where reference to val
3872 requires loading up part of an address in a separate insn.
3874 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3875 since it might be a promoted variable where the zero- or sign- extension
3876 needs to be done. Handling this in the normal way is safe because no
3877 computation is done before the call. */
3878 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
3879 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3880 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3881 && GET_CODE (DECL_RTL (to)) == REG))
3886 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3888 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3890 /* Handle calls that return values in multiple non-contiguous locations.
3891 The Irix 6 ABI has examples of this. */
3892 if (GET_CODE (to_rtx) == PARALLEL)
3893 emit_group_load (to_rtx, value, TREE_TYPE (from),
3894 int_size_in_bytes (TREE_TYPE (from)));
3895 else if (GET_MODE (to_rtx) == BLKmode)
3896 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
3899 if (POINTER_TYPE_P (TREE_TYPE (to)))
3900 value = convert_memory_address (GET_MODE (to_rtx), value);
3901 emit_move_insn (to_rtx, value);
3903 preserve_temp_slots (to_rtx);
3906 return want_value ? to_rtx : NULL_RTX;
3909 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3910 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3913 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3915 /* Don't move directly into a return register. */
3916 if (TREE_CODE (to) == RESULT_DECL
3917 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3922 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3924 if (GET_CODE (to_rtx) == PARALLEL)
3925 emit_group_load (to_rtx, temp, TREE_TYPE (from),
3926 int_size_in_bytes (TREE_TYPE (from)));
3928 emit_move_insn (to_rtx, temp);
3930 preserve_temp_slots (to_rtx);
3933 return want_value ? to_rtx : NULL_RTX;
3936 /* In case we are returning the contents of an object which overlaps
3937 the place the value is being stored, use a safe function when copying
3938 a value through a pointer into a structure value return block. */
3939 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3940 && current_function_returns_struct
3941 && !current_function_returns_pcc_struct)
3946 size = expr_size (from);
3947 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
3949 if (TARGET_MEM_FUNCTIONS)
3950 emit_library_call (memmove_libfunc, LCT_NORMAL,
3951 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3952 XEXP (from_rtx, 0), Pmode,
3953 convert_to_mode (TYPE_MODE (sizetype),
3954 size, TREE_UNSIGNED (sizetype)),
3955 TYPE_MODE (sizetype));
3957 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3958 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3959 XEXP (to_rtx, 0), Pmode,
3960 convert_to_mode (TYPE_MODE (integer_type_node),
3962 TREE_UNSIGNED (integer_type_node)),
3963 TYPE_MODE (integer_type_node));
3965 preserve_temp_slots (to_rtx);
3968 return want_value ? to_rtx : NULL_RTX;
3971 /* Compute FROM and store the value in the rtx we got. */
3974 result = store_expr (from, to_rtx, want_value);
3975 preserve_temp_slots (result);
3978 return want_value ? result : NULL_RTX;
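/* Editorial sketch (not part of the original source): the WANT_VALUE
   contract.  The tree arguments are assumed to have been built by the
   front end; the helper name is hypothetical.  */
#if 0
static rtx
example_expand_assignment (tree lhs, tree rhs)
{
  /* Nonzero WANT_VALUE returns an rtx for the stored value so the
     assignment can be used as a subexpression; 0 returns NULL_RTX.  */
  return expand_assignment (lhs, rhs, 1);
}
#endif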
3981 /* Generate code for computing expression EXP,
3982 and storing the value into TARGET.
3983 TARGET may contain a QUEUED rtx.
3985 If WANT_VALUE & 1 is nonzero, return a copy of the value
3986 not in TARGET, so that we can be sure to use the proper
3987 value in a containing expression even if TARGET has something
3988 else stored in it. If possible, we copy the value through a pseudo
3989 and return that pseudo. Or, if the value is constant, we try to
3990 return the constant. In some cases, we return a pseudo
3991 copied *from* TARGET.
3993 If the mode is BLKmode then we may return TARGET itself.
3994 It turns out that in BLKmode it doesn't cause a problem,
3995 because C has no operators that could combine two different
3996 assignments into the same BLKmode object with different values
3997 with no sequence point. Will other languages need this to be more careful?
4000 If WANT_VALUE & 1 is 0, we return NULL, to make sure
4001 to catch quickly any cases where the caller uses the value
4002 and fails to set WANT_VALUE.
4004 If WANT_VALUE & 2 is set, this is a store into a call param on the
4005 stack, and block moves may need to be treated specially. */
4008 store_expr (tree exp, rtx target, int want_value)
4011 int dont_return_target = 0;
4012 int dont_store_target = 0;
4014 if (VOID_TYPE_P (TREE_TYPE (exp)))
4016 /* C++ can generate ?: expressions with a throw expression in one
4017 branch and an rvalue in the other. Here, we resolve attempts to
4018 store the throw expression's nonexistent result. */
4021 expand_expr (exp, const0_rtx, VOIDmode, 0);
4024 if (TREE_CODE (exp) == COMPOUND_EXPR)
4026 /* Perform first part of compound expression, then assign from second part. */
4028 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4029 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4031 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4033 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4035 /* For conditional expression, get safe form of the target. Then
4036 test the condition, doing the appropriate assignment on either
4037 side. This avoids the creation of unnecessary temporaries.
4038 For non-BLKmode, it is more efficient not to do this. */
4040 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4043 target = protect_from_queue (target, 1);
4045 do_pending_stack_adjust ();
4047 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4048 start_cleanup_deferral ();
4049 store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
4050 end_cleanup_deferral ();
4052 emit_jump_insn (gen_jump (lab2));
4055 start_cleanup_deferral ();
4056 store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
4057 end_cleanup_deferral ();
4062 return want_value & 1 ? target : NULL_RTX;
4064 else if (queued_subexp_p (target))
4065 /* If target contains a postincrement, let's not risk
4066 using it as the place to generate the rhs. */
4068 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4070 /* Expand EXP into a new pseudo. */
4071 temp = gen_reg_rtx (GET_MODE (target));
4072 temp = expand_expr (exp, temp, GET_MODE (target),
4074 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4077 temp = expand_expr (exp, NULL_RTX, GET_MODE (target),
4079 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4081 /* If target is volatile, ANSI requires accessing the value
4082 *from* the target, if it is accessed. So make that happen.
4083 In no case return the target itself. */
4084 if (! MEM_VOLATILE_P (target) && (want_value & 1) != 0)
4085 dont_return_target = 1;
4087 else if ((want_value & 1) != 0
4088 && GET_CODE (target) == MEM
4089 && ! MEM_VOLATILE_P (target)
4090 && GET_MODE (target) != BLKmode)
4091 /* If target is in memory and caller wants value in a register instead,
4092 arrange that. Pass TARGET as target for expand_expr so that,
4093 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4094 We know expand_expr will not use the target in that case.
4095 Don't do this if TARGET is volatile because we are supposed
4096 to write it and then read it. */
4098 temp = expand_expr (exp, target, GET_MODE (target),
4099 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4100 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4102 /* If TEMP is already in the desired TARGET, only copy it from
4103 memory and don't store it there again. */
4104 if (temp == target
4105 || (rtx_equal_p (temp, target)
4106 && ! side_effects_p (temp) && ! side_effects_p (target)))
4107 dont_store_target = 1;
4108 temp = copy_to_reg (temp);
4110 dont_return_target = 1;
4112 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4113 /* If this is a scalar in a register that is stored in a wider mode
4114 than the declared mode, compute the result into its declared mode
4115 and then convert to the wider mode. Our value is the computed
4116 expression. */
4118 rtx inner_target = 0;
4120 /* If we don't want a value, we can do the conversion inside EXP,
4121 which will often result in some optimizations. Do the conversion
4122 in two steps: first change the signedness, if needed, then
4123 the extend. But don't do this if the type of EXP is a subtype
4124 of something else since then the conversion might involve
4125 more than just converting modes. */
4126 if ((want_value & 1) == 0
4127 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4128 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4130 if (TREE_UNSIGNED (TREE_TYPE (exp))
4131 != SUBREG_PROMOTED_UNSIGNED_P (target))
4132 exp = convert
4133 ((*lang_hooks.types.signed_or_unsigned_type)
4134 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4136 exp = convert ((*lang_hooks.types.type_for_mode)
4137 (GET_MODE (SUBREG_REG (target)),
4138 SUBREG_PROMOTED_UNSIGNED_P (target)),
4139 exp);
4141 inner_target = SUBREG_REG (target);
4144 temp = expand_expr (exp, inner_target, VOIDmode,
4145 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4147 /* If TEMP is a MEM and we want a result value, make the access
4148 now so it gets done only once. Strictly speaking, this is
4149 only necessary if the MEM is volatile, or if the address
4150 overlaps TARGET. But not performing the load twice also
4151 reduces the amount of rtl we generate and then have to CSE. */
4152 if (GET_CODE (temp) == MEM && (want_value & 1) != 0)
4153 temp = copy_to_reg (temp);
4155 /* If TEMP is a VOIDmode constant, use convert_modes to make
4156 sure that we properly convert it. */
4157 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4159 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4160 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4161 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4162 GET_MODE (target), temp,
4163 SUBREG_PROMOTED_UNSIGNED_P (target));
4166 convert_move (SUBREG_REG (target), temp,
4167 SUBREG_PROMOTED_UNSIGNED_P (target));
4169 /* If we promoted a constant, change the mode back down to match
4170 target. Otherwise, the caller might get confused by a result whose
4171 mode is larger than expected. */
4173 if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
4175 if (GET_MODE (temp) != VOIDmode)
4177 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4178 SUBREG_PROMOTED_VAR_P (temp) = 1;
4179 SUBREG_PROMOTED_UNSIGNED_SET (temp,
4180 SUBREG_PROMOTED_UNSIGNED_P (target));
4182 else
4183 temp = convert_modes (GET_MODE (target),
4184 GET_MODE (SUBREG_REG (target)),
4185 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4188 return want_value & 1 ? temp : NULL_RTX;
4192 temp = expand_expr (exp, target, GET_MODE (target),
4193 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4194 /* Return TARGET if it's a specified hardware register.
4195 If TARGET is a volatile mem ref, either return TARGET
4196 or return a reg copied *from* TARGET; ANSI requires this.
4198 Otherwise, if TEMP is not TARGET, return TEMP
4199 if it is constant (for efficiency),
4200 or if we really want the correct value. */
4201 if (!(target && GET_CODE (target) == REG
4202 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4203 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4204 && ! rtx_equal_p (temp, target)
4205 && (CONSTANT_P (temp) || (want_value & 1) != 0))
4206 dont_return_target = 1;
4209 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4210 the same as that of TARGET, adjust the constant. This is needed, for
4211 example, in case it is a CONST_DOUBLE and we want only a word-sized
4212 value. */
4213 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4214 && TREE_CODE (exp) != ERROR_MARK
4215 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4216 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4217 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4219 /* If value was not generated in the target, store it there.
4220 Convert the value to TARGET's type first if necessary.
4221 If TEMP and TARGET compare equal according to rtx_equal_p, but
4222 one or both of them are volatile memory refs, we have to distinguish
4223 two cases:
4224 - expand_expr has used TARGET. In this case, we must not generate
4225 another copy. This can be detected by TARGET being equal according
4226 to == .
4227 - expand_expr has not used TARGET - that means that the source just
4228 happens to have the same RTX form. Since temp will have been created
4229 by expand_expr, it will compare unequal according to == .
4230 We must generate a copy in this case, to reach the correct number
4231 of volatile memory references. */
4233 if ((! rtx_equal_p (temp, target)
4234 || (temp != target && (side_effects_p (temp)
4235 || side_effects_p (target))))
4236 && TREE_CODE (exp) != ERROR_MARK
4237 && ! dont_store_target
4238 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4239 but TARGET is not a valid memory reference, TEMP will differ
4240 from TARGET although it is really the same location. */
4241 && (TREE_CODE_CLASS (TREE_CODE (exp)) != 'd'
4242 || target != DECL_RTL_IF_SET (exp))
4243 /* If there's nothing to copy, don't bother. Don't call expr_size
4244 unless necessary, because some front-ends' (C++) expr_size hook
4245 aborts on objects that are not supposed to be bit-copied or
4246 bit-initialized. */
4247 && expr_size (exp) != const0_rtx)
4249 target = protect_from_queue (target, 1);
4250 if (GET_MODE (temp) != GET_MODE (target)
4251 && GET_MODE (temp) != VOIDmode)
4253 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4254 if (dont_return_target)
4256 /* In this case, we will return TEMP,
4257 so make sure it has the proper mode.
4258 But don't forget to store the value into TARGET. */
4259 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4260 emit_move_insn (target, temp);
4262 else
4263 convert_move (target, temp, unsignedp);
4266 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4268 /* Handle copying a string constant into an array. The string
4269 constant may be shorter than the array. So copy just the string's
4270 actual length, and clear the rest. First get the size of the data
4271 type of the string, which is actually the size of the target. */
4272 rtx size = expr_size (exp);
4274 if (GET_CODE (size) == CONST_INT
4275 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4276 emit_block_move (target, temp, size,
4277 (want_value & 2
4278 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4281 /* Compute the size of the data to copy from the string. */
4282 tree copy_size
4283 = size_binop (MIN_EXPR,
4284 make_tree (sizetype, size),
4285 size_int (TREE_STRING_LENGTH (exp)));
4286 rtx copy_size_rtx
4287 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4288 (want_value & 2
4289 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4290 rtx label = 0;
4292 /* Copy that much. */
4293 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4294 TREE_UNSIGNED (sizetype));
4295 emit_block_move (target, temp, copy_size_rtx,
4296 (want_value & 2
4297 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4299 /* Figure out how much is left in TARGET that we have to clear.
4300 Do all calculations in ptr_mode. */
4301 if (GET_CODE (copy_size_rtx) == CONST_INT)
4303 size = plus_constant (size, -INTVAL (copy_size_rtx));
4304 target = adjust_address (target, BLKmode,
4305 INTVAL (copy_size_rtx));
4309 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4310 copy_size_rtx, NULL_RTX, 0,
4311 OPTAB_LIB_WIDEN);
4313 #ifdef POINTERS_EXTEND_UNSIGNED
4314 if (GET_MODE (copy_size_rtx) != Pmode)
4315 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4316 TREE_UNSIGNED (sizetype));
4317 #endif
4319 target = offset_address (target, copy_size_rtx,
4320 highest_pow2_factor (copy_size));
4321 label = gen_label_rtx ();
4322 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4323 GET_MODE (size), 0, label);
4326 if (size != const0_rtx)
4327 clear_storage (target, size);
4329 if (label)
4330 emit_label (label);
4333 /* Handle calls that return values in multiple non-contiguous locations.
4334 The Irix 6 ABI has examples of this. */
4335 else if (GET_CODE (target) == PARALLEL)
4336 emit_group_load (target, temp, TREE_TYPE (exp),
4337 int_size_in_bytes (TREE_TYPE (exp)));
4338 else if (GET_MODE (temp) == BLKmode)
4339 emit_block_move (target, temp, expr_size (exp),
4340 (want_value & 2
4341 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4342 else
4343 emit_move_insn (target, temp);
4346 /* If we don't want a value, return NULL_RTX. */
4347 if ((want_value & 1) == 0)
4348 return NULL_RTX;
4350 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4351 ??? The latter test doesn't seem to make sense. */
4352 else if (dont_return_target && GET_CODE (temp) != MEM)
4353 return temp;
4355 /* Return TARGET itself if it is a hard register. */
4356 else if ((want_value & 1) != 0
4357 && GET_MODE (target) != BLKmode
4358 && ! (GET_CODE (target) == REG
4359 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4360 return copy_to_reg (target);
4362 else
4363 return target;
4364 }
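/* A minimal user-level analogue (illustrative only, not part of GCC) of the
   STRING_CST path in store_expr above: copy just the string's bytes, then
   clear whatever is left of the target, exactly the emit_block_move +
   clear_storage sequence.  Helper name and signature are hypothetical.  */
#if 0
#include <string.h>
#include <stddef.h>

static void
store_string_sketch (char *target, size_t target_size,
                     const char *str, size_t str_len)
{
  size_t copy = str_len < target_size ? str_len : target_size;
  memcpy (target, str, copy);                    /* copy that much */
  memset (target + copy, 0, target_size - copy); /* clear the rest */
}
#endif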
4366 /* Return 1 if EXP just contains zeros. FIXME merge with initializer_zerop. */
4368 static int
4369 is_zeros_p (tree exp)
4370 {
4371 tree elt;
4373 switch (TREE_CODE (exp))
4375 case CONVERT_EXPR:
4376 case NOP_EXPR:
4377 case NON_LVALUE_EXPR:
4378 case VIEW_CONVERT_EXPR:
4379 return is_zeros_p (TREE_OPERAND (exp, 0));
4381 case INTEGER_CST:
4382 return integer_zerop (exp);
4384 case COMPLEX_CST:
4385 return
4386 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4388 case REAL_CST:
4389 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4391 case VECTOR_CST:
4392 for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4393 elt = TREE_CHAIN (elt))
4394 if (!is_zeros_p (TREE_VALUE (elt)))
4395 return 0;
4397 return 1;
4399 case CONSTRUCTOR:
4400 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4401 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4402 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4403 if (! is_zeros_p (TREE_VALUE (elt)))
4404 return 0;
4406 return 1;
4408 default:
4409 return 0;
4410 }
4411 }
4413 /* Return 1 if EXP contains mostly (3/4) zeros. */
4415 static int
4416 mostly_zeros_p (tree exp)
4417 {
4418 if (TREE_CODE (exp) == CONSTRUCTOR)
4420 int elts = 0, zeros = 0;
4421 tree elt = CONSTRUCTOR_ELTS (exp);
4422 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4424 /* If there are no ranges of true bits, it is all zero. */
4425 return elt == NULL_TREE;
4427 for (; elt; elt = TREE_CHAIN (elt))
4429 /* We do not handle the case where the index is a RANGE_EXPR,
4430 so the statistic will be somewhat inaccurate.
4431 We do make a more accurate count in store_constructor itself,
4432 and since this function is only used for nested array elements,
4433 this should be close enough. */
4434 if (mostly_zeros_p (TREE_VALUE (elt)))
4435 zeros++;
4436 elts++;
4437 }
4439 return 4 * zeros >= 3 * elts;
4442 return is_zeros_p (exp);
4443 }
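/* Illustrative sketch (not part of GCC) of the 3/4 test above:
   "4 * zeros >= 3 * elts" is "zeros / elts >= 3/4" computed purely in
   integer arithmetic, avoiding division and floating point.  */
#if 0
#include <stddef.h>

static int
mostly_zeros_sketch (const int *v, size_t n)
{
  size_t zeros = 0, i;
  for (i = 0; i < n; i++)
    if (v[i] == 0)
      zeros++;
  return 4 * zeros >= 3 * n;   /* true when at least 75% are zero */
}
#endif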
4445 /* Helper function for store_constructor.
4446 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4447 TYPE is the type of the CONSTRUCTOR, not the element type.
4448 CLEARED is as for store_constructor.
4449 ALIAS_SET is the alias set to use for any stores.
4451 This provides a recursive shortcut back to store_constructor when it isn't
4452 necessary to go through store_field. This is so that we can pass through
4453 the cleared field to let store_constructor know that we may not have to
4454 clear a substructure if the outer structure has already been cleared. */
4456 static void
4457 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4458 HOST_WIDE_INT bitpos, enum machine_mode mode,
4459 tree exp, tree type, int cleared, int alias_set)
4461 if (TREE_CODE (exp) == CONSTRUCTOR
4462 && bitpos % BITS_PER_UNIT == 0
4463 /* If we have a nonzero bitpos for a register target, then we just
4464 let store_field do the bitfield handling. This is unlikely to
4465 generate unnecessary clear instructions anyways. */
4466 && (bitpos == 0 || GET_CODE (target) == MEM))
4468 if (GET_CODE (target) == MEM)
4469 target
4470 = adjust_address (target,
4471 GET_MODE (target) == BLKmode
4472 || 0 != (bitpos
4473 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4474 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4477 /* Update the alias set, if required. */
4478 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4479 && MEM_ALIAS_SET (target) != 0)
4481 target = copy_rtx (target);
4482 set_mem_alias_set (target, alias_set);
4485 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4486 }
4487 else
4488 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4489 alias_set);
4490 }
4492 /* Store the value of constructor EXP into the rtx TARGET.
4493 TARGET is either a REG or a MEM; we know it cannot conflict, since
4494 safe_from_p has been called.
4495 CLEARED is true if TARGET is known to have been zero'd.
4496 SIZE is the number of bytes of TARGET we are allowed to modify: this
4497 may not be the same as the size of EXP if we are assigning to a field
4498 which has been packed to exclude padding bits. */
4500 static void
4501 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4502 {
4503 tree type = TREE_TYPE (exp);
4504 #ifdef WORD_REGISTER_OPERATIONS
4505 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4506 #endif
4508 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4509 || TREE_CODE (type) == QUAL_UNION_TYPE)
4513 /* If size is zero or the target is already cleared, do nothing. */
4514 if (size == 0 || cleared)
4515 cleared = 1;
4516 /* We either clear the aggregate or indicate the value is dead. */
4517 else if ((TREE_CODE (type) == UNION_TYPE
4518 || TREE_CODE (type) == QUAL_UNION_TYPE)
4519 && ! CONSTRUCTOR_ELTS (exp))
4520 /* If the constructor is empty, clear the union. */
4522 clear_storage (target, expr_size (exp));
4523 cleared = 1;
4526 /* If we are building a static constructor into a register,
4527 set the initial value as zero so we can fold the value into
4528 a constant. But if more than one register is involved,
4529 this probably loses. */
4530 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4531 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4533 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4534 cleared = 1;
4537 /* If the constructor has fewer fields than the structure
4538 or if we are initializing the structure to mostly zeros,
4539 clear the whole structure first. Don't do this if TARGET is a
4540 register whose mode size isn't equal to SIZE since clear_storage
4541 can't handle this case. */
4542 else if (((list_length (CONSTRUCTOR_ELTS (exp)) != fields_length (type))
4543 || mostly_zeros_p (exp))
4544 && (GET_CODE (target) != REG
4545 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4546 == size)))
4548 rtx xtarget = target;
4550 if (readonly_fields_p (type))
4552 xtarget = copy_rtx (xtarget);
4553 RTX_UNCHANGING_P (xtarget) = 1;
4556 clear_storage (xtarget, GEN_INT (size));
4557 cleared = 1;
4560 if (! cleared)
4561 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4563 /* Store each element of the constructor into
4564 the corresponding field of TARGET. */
4566 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4568 tree field = TREE_PURPOSE (elt);
4569 tree value = TREE_VALUE (elt);
4570 enum machine_mode mode;
4571 HOST_WIDE_INT bitsize;
4572 HOST_WIDE_INT bitpos = 0;
4574 rtx to_rtx = target;
4576 /* Just ignore missing fields.
4577 We cleared the whole structure, above,
4578 if any fields are missing. */
4579 if (field == 0)
4580 continue;
4582 if (cleared && is_zeros_p (value))
4583 continue;
4585 if (host_integerp (DECL_SIZE (field), 1))
4586 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4587 else
4588 bitsize = -1;
4590 mode = DECL_MODE (field);
4591 if (DECL_BIT_FIELD (field))
4592 mode = VOIDmode;
4594 offset = DECL_FIELD_OFFSET (field);
4595 if (host_integerp (offset, 0)
4596 && host_integerp (bit_position (field), 0))
4598 bitpos = int_bit_position (field);
4599 offset = 0;
4601 else
4602 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4604 if (offset)
4606 rtx offset_rtx;
4608 if (CONTAINS_PLACEHOLDER_P (offset))
4609 offset = build (WITH_RECORD_EXPR, sizetype,
4610 offset, make_tree (TREE_TYPE (exp), target));
4612 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4613 if (GET_CODE (to_rtx) != MEM)
4614 abort ();
4616 #ifdef POINTERS_EXTEND_UNSIGNED
4617 if (GET_MODE (offset_rtx) != Pmode)
4618 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4619 #else
4620 if (GET_MODE (offset_rtx) != ptr_mode)
4621 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4622 #endif
4624 to_rtx = offset_address (to_rtx, offset_rtx,
4625 highest_pow2_factor (offset));
4628 if (TREE_READONLY (field))
4630 if (GET_CODE (to_rtx) == MEM)
4631 to_rtx = copy_rtx (to_rtx);
4633 RTX_UNCHANGING_P (to_rtx) = 1;
4636 #ifdef WORD_REGISTER_OPERATIONS
4637 /* If this initializes a field that is smaller than a word, at the
4638 start of a word, try to widen it to a full word.
4639 This special case allows us to output C++ member function
4640 initializations in a form that the optimizers can understand. */
4641 if (GET_CODE (target) == REG
4642 && bitsize < BITS_PER_WORD
4643 && bitpos % BITS_PER_WORD == 0
4644 && GET_MODE_CLASS (mode) == MODE_INT
4645 && TREE_CODE (value) == INTEGER_CST
4646 && exp_size >= 0
4647 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4649 tree type = TREE_TYPE (value);
4651 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4653 type = (*lang_hooks.types.type_for_size)
4654 (BITS_PER_WORD, TREE_UNSIGNED (type));
4655 value = convert (type, value);
4658 if (BYTES_BIG_ENDIAN)
4659 value
4660 = fold (build (LSHIFT_EXPR, type, value,
4661 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4662 bitsize = BITS_PER_WORD;
4663 mode = word_mode;
4664 }
4665 #endif
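/* Illustrative sketch (not part of GCC): on a big-endian target the field's
   bits live at the high-order end of the word, so the widened constant is
   pre-shifted left by (word size - field size), which is what the
   LSHIFT_EXPR above builds.  Assumes 32-bit words and 0 < bitsize < 32.  */
#if 0
static unsigned int
widen_for_big_endian_sketch (unsigned int value, unsigned int bitsize)
{
  return value << (32 - bitsize);
}
#endif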
4667 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4668 && DECL_NONADDRESSABLE_P (field))
4670 to_rtx = copy_rtx (to_rtx);
4671 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4674 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4675 value, type, cleared,
4676 get_alias_set (TREE_TYPE (field)));
4679 else if (TREE_CODE (type) == ARRAY_TYPE
4680 || TREE_CODE (type) == VECTOR_TYPE)
4682 tree elt;
4683 int i;
4684 int need_to_clear;
4685 tree domain = TYPE_DOMAIN (type);
4686 tree elttype = TREE_TYPE (type);
4687 int const_bounds_p;
4688 HOST_WIDE_INT minelt = 0;
4689 HOST_WIDE_INT maxelt = 0;
4691 /* Vectors are like arrays, but the domain is stored via an array
4692 type indirectly. */
4693 if (TREE_CODE (type) == VECTOR_TYPE)
4695 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
4696 the same field as TYPE_DOMAIN, we are not guaranteed that
4697 it always will. */
4698 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
4699 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
4702 const_bounds_p = (TYPE_MIN_VALUE (domain)
4703 && TYPE_MAX_VALUE (domain)
4704 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4705 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4707 /* If we have constant bounds for the range of the type, get them. */
4708 if (const_bounds_p)
4710 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4711 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4714 /* If the constructor has fewer elements than the array,
4715 clear the whole array first. Similarly if this is a
4716 static constructor of a non-BLKmode object. */
4717 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4718 need_to_clear = 1;
4719 else
4721 HOST_WIDE_INT count = 0, zero_count = 0;
4722 need_to_clear = ! const_bounds_p;
4724 /* This loop is a more accurate version of the loop in
4725 mostly_zeros_p (it handles RANGE_EXPR in an index).
4726 It is also needed to check for missing elements. */
4727 for (elt = CONSTRUCTOR_ELTS (exp);
4728 elt != NULL_TREE && ! need_to_clear;
4729 elt = TREE_CHAIN (elt))
4731 tree index = TREE_PURPOSE (elt);
4732 HOST_WIDE_INT this_node_count;
4734 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4736 tree lo_index = TREE_OPERAND (index, 0);
4737 tree hi_index = TREE_OPERAND (index, 1);
4739 if (! host_integerp (lo_index, 1)
4740 || ! host_integerp (hi_index, 1))
4742 need_to_clear = 1;
4743 break;
4746 this_node_count = (tree_low_cst (hi_index, 1)
4747 - tree_low_cst (lo_index, 1) + 1);
4749 else
4750 this_node_count = 1;
4752 count += this_node_count;
4753 if (mostly_zeros_p (TREE_VALUE (elt)))
4754 zero_count += this_node_count;
4757 /* Clear the entire array first if there are any missing elements,
4758 or if the incidence of zero elements is >= 75%. */
4759 if (! need_to_clear
4760 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4761 need_to_clear = 1;
4764 if (need_to_clear && size > 0)
4766 if (! cleared)
4768 if (REG_P (target))
4769 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4770 else
4771 clear_storage (target, GEN_INT (size));
4773 cleared = 1;
4775 else if (REG_P (target))
4776 /* Inform later passes that the old value is dead. */
4777 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4779 /* Store each element of the constructor into
4780 the corresponding element of TARGET, determined
4781 by counting the elements. */
4782 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4783 elt;
4784 elt = TREE_CHAIN (elt), i++)
4786 enum machine_mode mode;
4787 HOST_WIDE_INT bitsize;
4788 HOST_WIDE_INT bitpos;
4789 int unsignedp;
4790 tree value = TREE_VALUE (elt);
4791 tree index = TREE_PURPOSE (elt);
4792 rtx xtarget = target;
4794 if (cleared && is_zeros_p (value))
4795 continue;
4797 unsignedp = TREE_UNSIGNED (elttype);
4798 mode = TYPE_MODE (elttype);
4799 if (mode == BLKmode)
4800 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4801 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4802 : -1);
4803 else
4804 bitsize = GET_MODE_BITSIZE (mode);
4806 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4808 tree lo_index = TREE_OPERAND (index, 0);
4809 tree hi_index = TREE_OPERAND (index, 1);
4810 rtx index_r, pos_rtx, loop_end;
4811 struct nesting *loop;
4812 HOST_WIDE_INT lo, hi, count;
4813 tree position;
4815 /* If the range is constant and "small", unroll the loop. */
4816 if (const_bounds_p
4817 && host_integerp (lo_index, 0)
4818 && host_integerp (hi_index, 0)
4819 && (lo = tree_low_cst (lo_index, 0),
4820 hi = tree_low_cst (hi_index, 0),
4821 count = hi - lo + 1,
4822 (GET_CODE (target) != MEM
4823 || count <= 2
4824 || (host_integerp (TYPE_SIZE (elttype), 1)
4825 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4826 <= 40 * 8)))))
4828 lo -= minelt; hi -= minelt;
4829 for (; lo <= hi; lo++)
4831 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4833 if (GET_CODE (target) == MEM
4834 && !MEM_KEEP_ALIAS_SET_P (target)
4835 && TREE_CODE (type) == ARRAY_TYPE
4836 && TYPE_NONALIASED_COMPONENT (type))
4838 target = copy_rtx (target);
4839 MEM_KEEP_ALIAS_SET_P (target) = 1;
4842 store_constructor_field
4843 (target, bitsize, bitpos, mode, value, type, cleared,
4844 get_alias_set (elttype));
4847 else
4849 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4850 loop_end = gen_label_rtx ();
4852 unsignedp = TREE_UNSIGNED (domain);
4854 index = build_decl (VAR_DECL, NULL_TREE, domain);
4856 index_r
4857 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4858 &unsignedp, 0));
4859 SET_DECL_RTL (index, index_r);
4860 if (TREE_CODE (value) == SAVE_EXPR
4861 && SAVE_EXPR_RTL (value) == 0)
4863 /* Make sure value gets expanded once before the
4864 loop. */
4865 expand_expr (value, const0_rtx, VOIDmode, 0);
4868 store_expr (lo_index, index_r, 0);
4869 loop = expand_start_loop (0);
4871 /* Assign value to element index. */
4872 position
4873 = convert (ssizetype,
4874 fold (build (MINUS_EXPR, TREE_TYPE (index),
4875 index, TYPE_MIN_VALUE (domain))));
4876 position = size_binop (MULT_EXPR, position,
4877 convert (ssizetype,
4878 TYPE_SIZE_UNIT (elttype)));
4880 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4881 xtarget = offset_address (target, pos_rtx,
4882 highest_pow2_factor (position));
4883 xtarget = adjust_address (xtarget, mode, 0);
4884 if (TREE_CODE (value) == CONSTRUCTOR)
4885 store_constructor (value, xtarget, cleared,
4886 bitsize / BITS_PER_UNIT);
4888 store_expr (value, xtarget, 0);
4890 expand_exit_loop_if_false (loop,
4891 build (LT_EXPR, integer_type_node,
4892 index, hi_index));
4894 expand_increment (build (PREINCREMENT_EXPR,
4895 TREE_TYPE (index),
4896 index, integer_one_node), 0, 0);
4897 expand_end_loop ();
4898 emit_label (loop_end);
4901 else if ((index != 0 && ! host_integerp (index, 0))
4902 || ! host_integerp (TYPE_SIZE (elttype), 1))
4904 tree position;
4906 if (index == 0)
4907 index = ssize_int (i);
4909 if (minelt)
4910 index = convert (ssizetype,
4911 fold (build (MINUS_EXPR, index,
4912 TYPE_MIN_VALUE (domain))));
4914 position = size_binop (MULT_EXPR, index,
4915 convert (ssizetype,
4916 TYPE_SIZE_UNIT (elttype)));
4917 xtarget = offset_address (target,
4918 expand_expr (position, 0, VOIDmode, 0),
4919 highest_pow2_factor (position));
4920 xtarget = adjust_address (xtarget, mode, 0);
4921 store_expr (value, xtarget, 0);
4923 else
4925 if (index != 0)
4926 bitpos = ((tree_low_cst (index, 0) - minelt)
4927 * tree_low_cst (TYPE_SIZE (elttype), 1));
4928 else
4929 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4931 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
4932 && TREE_CODE (type) == ARRAY_TYPE
4933 && TYPE_NONALIASED_COMPONENT (type))
4935 target = copy_rtx (target);
4936 MEM_KEEP_ALIAS_SET_P (target) = 1;
4939 store_constructor_field (target, bitsize, bitpos, mode, value,
4940 type, cleared, get_alias_set (elttype));
4946 /* Set constructor assignments. */
4947 else if (TREE_CODE (type) == SET_TYPE)
4949 tree elt = CONSTRUCTOR_ELTS (exp);
4950 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4951 tree domain = TYPE_DOMAIN (type);
4952 tree domain_min, domain_max, bitlength;
4954 /* The default implementation strategy is to extract the constant
4955 parts of the constructor, use that to initialize the target,
4956 and then "or" in whatever non-constant ranges we need in addition.
4958 If a large set is all zero or all ones, it is
4959 probably better to set it using memset (if available) or bzero.
4960 Also, if a large set has just a single range, it may also be
4961 better to first clear the whole set (using bzero/memset) and
4962 then set the bits we want. */
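/* Illustrative sketch (not part of GCC) of the strategy just described, on
   a plain byte-array bitset: clear everything, then OR in one range of set
   bits.  Hypothetical helper; assumes endbit < nbytes * 8 and little-endian
   bit numbering within each byte.  */
#if 0
#include <string.h>
#include <stddef.h>

static void
set_range_sketch (unsigned char *set, size_t nbytes,
                  size_t startbit, size_t endbit)
{
  size_t b;
  memset (set, 0, nbytes);              /* the bzero/memset step */
  for (b = startbit; b <= endbit; b++)  /* then set the bits we want */
    set[b / 8] |= (unsigned char) (1u << (b % 8));
}
#endif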
4964 /* Check for all zeros. */
4965 if (elt == NULL_TREE && size > 0)
4967 if (!cleared)
4968 clear_storage (target, GEN_INT (size));
4969 return;
4972 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4973 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4974 bitlength = size_binop (PLUS_EXPR,
4975 size_diffop (domain_max, domain_min),
4976 ssize_int (1));
4978 nbits = tree_low_cst (bitlength, 1);
4980 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4981 are "complicated" (more than one range), initialize (the
4982 constant parts) by copying from a constant. */
4983 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4984 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4986 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4987 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4988 char *bit_buffer = alloca (nbits);
4989 HOST_WIDE_INT word = 0;
4990 unsigned int bit_pos = 0;
4991 unsigned int ibit = 0;
4992 unsigned int offset = 0; /* In bytes from beginning of set. */
4994 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4995 for (;;)
4997 if (bit_buffer[ibit])
4999 if (BYTES_BIG_ENDIAN)
5000 word |= (1 << (set_word_size - 1 - bit_pos));
5001 else
5002 word |= 1 << bit_pos;
5004 bit_pos++; ibit++;
5006 if (bit_pos >= set_word_size || ibit == nbits)
5008 if (word != 0 || ! cleared)
5010 rtx datum = GEN_INT (word);
5011 rtx to_rtx;
5013 /* The assumption here is that it is safe to use
5014 XEXP if the set is multi-word, but not if
5015 it's single-word. */
5016 if (GET_CODE (target) == MEM)
5017 to_rtx = adjust_address (target, mode, offset);
5018 else if (offset == 0)
5019 to_rtx = target;
5020 else
5021 abort ();
5022 emit_move_insn (to_rtx, datum);
5025 if (ibit == nbits)
5026 break;
5027 word = 0;
5028 bit_pos = 0;
5029 offset += set_word_size / BITS_PER_UNIT;
5034 /* Don't bother clearing storage if the set is all ones. */
5035 if (TREE_CHAIN (elt) != NULL_TREE
5036 || (TREE_PURPOSE (elt) == NULL_TREE
5037 ? nbits != 1
5038 : ( ! host_integerp (TREE_VALUE (elt), 0)
5039 || ! host_integerp (TREE_PURPOSE (elt), 0)
5040 || (tree_low_cst (TREE_VALUE (elt), 0)
5041 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5042 != (HOST_WIDE_INT) nbits))))
5043 clear_storage (target, expr_size (exp));
5045 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5047 /* Start of range of element or NULL. */
5048 tree startbit = TREE_PURPOSE (elt);
5049 /* End of range of element, or element value. */
5050 tree endbit = TREE_VALUE (elt);
5051 HOST_WIDE_INT startb, endb;
5052 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5054 bitlength_rtx = expand_expr (bitlength,
5055 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5057 /* Handle non-range tuple element like [ expr ]. */
5058 if (startbit == NULL_TREE)
5060 startbit = save_expr (endbit);
5061 endbit = startbit;
5064 startbit = convert (sizetype, startbit);
5065 endbit = convert (sizetype, endbit);
5066 if (! integer_zerop (domain_min))
5068 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5069 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5071 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5072 EXPAND_CONST_ADDRESS);
5073 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5074 EXPAND_CONST_ADDRESS);
5076 if (GET_CODE (target) == REG)
5078 targetx
5079 = assign_temp
5080 ((build_qualified_type ((*lang_hooks.types.type_for_mode)
5081 (GET_MODE (target), 0),
5082 TYPE_QUAL_CONST)),
5083 0, 1, 1);
5084 emit_move_insn (targetx, target);
5087 else if (GET_CODE (target) == MEM)
5088 targetx = target;
5089 else
5090 abort ();
5092 /* Optimization: If startbit and endbit are constants divisible
5093 by BITS_PER_UNIT, call memset instead. */
5094 if (TARGET_MEM_FUNCTIONS
5095 && TREE_CODE (startbit) == INTEGER_CST
5096 && TREE_CODE (endbit) == INTEGER_CST
5097 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5098 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5100 emit_library_call (memset_libfunc, LCT_NORMAL,
5101 VOIDmode, 3,
5102 plus_constant (XEXP (targetx, 0),
5103 startb / BITS_PER_UNIT),
5104 Pmode,
5105 constm1_rtx, TYPE_MODE (integer_type_node),
5106 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5107 TYPE_MODE (sizetype));
5109 else
5110 emit_library_call (setbits_libfunc, LCT_NORMAL,
5111 VOIDmode, 4, XEXP (targetx, 0),
5112 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5113 startbit_rtx, TYPE_MODE (sizetype),
5114 endbit_rtx, TYPE_MODE (sizetype));
5116 if (GET_CODE (target) == REG)
5117 emit_move_insn (target, targetx);
5125 /* Store the value of EXP (an expression tree)
5126 into a subfield of TARGET which has mode MODE and occupies
5127 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5128 If MODE is VOIDmode, it means that we are storing into a bit-field.
5130 If VALUE_MODE is VOIDmode, return nothing in particular.
5131 UNSIGNEDP is not used in this case.
5133 Otherwise, return an rtx for the value stored. This rtx
5134 has mode VALUE_MODE if that is convenient to do.
5135 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5137 TYPE is the type of the underlying object,
5139 ALIAS_SET is the alias set for the destination. This value will
5140 (in general) be different from that for TARGET, since TARGET is a
5141 reference to the containing structure. */
5143 static rtx
5144 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5145 enum machine_mode mode, tree exp, enum machine_mode value_mode,
5146 int unsignedp, tree type, int alias_set)
5148 HOST_WIDE_INT width_mask = 0;
5150 if (TREE_CODE (exp) == ERROR_MARK)
5151 return const0_rtx;
5153 /* If we have nothing to store, do nothing unless the expression has
5154 side-effects. */
5155 if (bitsize == 0)
5156 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5157 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5158 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5160 /* If we are storing into an unaligned field of an aligned union that is
5161 in a register, we may have the mode of TARGET being an integer mode but
5162 MODE == BLKmode. In that case, get an aligned object whose size and
5163 alignment are the same as TARGET and store TARGET into it (we can avoid
5164 the store if the field being stored is the entire width of TARGET). Then
5165 call ourselves recursively to store the field into a BLKmode version of
5166 that object. Finally, load from the object into TARGET. This is not
5167 very efficient in general, but should only be slightly more expensive
5168 than the otherwise-required unaligned accesses. Perhaps this can be
5169 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5170 twice, once with emit_move_insn and once via store_field. */
5172 if (mode == BLKmode
5173 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5175 rtx object = assign_temp (type, 0, 1, 1);
5176 rtx blk_object = adjust_address (object, BLKmode, 0);
5178 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5179 emit_move_insn (object, target);
5181 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5182 alias_set);
5184 emit_move_insn (target, object);
5186 /* We want to return the BLKmode version of the data. */
5187 return blk_object;
5190 if (GET_CODE (target) == CONCAT)
5192 /* We're storing into a struct containing a single __complex. */
5194 if (bitpos != 0)
5195 abort ();
5196 return store_expr (exp, target, 0);
5199 /* If the structure is in a register or if the component
5200 is a bit field, we cannot use addressing to access it.
5201 Use bit-field techniques or SUBREG to store in it. */
5203 if (mode == VOIDmode
5204 || (mode != BLKmode && ! direct_store[(int) mode]
5205 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5206 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5207 || GET_CODE (target) == REG
5208 || GET_CODE (target) == SUBREG
5209 /* If the field isn't aligned enough to store as an ordinary memref,
5210 store it as a bit field. */
5211 || (mode != BLKmode
5212 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5213 || bitpos % GET_MODE_ALIGNMENT (mode))
5214 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5215 || (bitpos % BITS_PER_UNIT != 0)))
5216 /* If the RHS and field are a constant size and the size of the
5217 RHS isn't the same size as the bitfield, we must use bitfield
5218 operations. */
5219 || (bitsize >= 0
5220 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5221 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5223 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5225 /* If BITSIZE is narrower than the size of the type of EXP
5226 we will be narrowing TEMP. Normally, what's wanted are the
5227 low-order bits. However, if EXP's type is a record and this is
5228 big-endian machine, we want the upper BITSIZE bits. */
5229 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5230 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5231 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5232 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5233 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5234 - bitsize),
5235 temp, 1);
5237 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5238 MODE. */
5239 if (mode != VOIDmode && mode != BLKmode
5240 && mode != TYPE_MODE (TREE_TYPE (exp)))
5241 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5243 /* If the modes of TARGET and TEMP are both BLKmode, both
5244 must be in memory and BITPOS must be aligned on a byte
5245 boundary. If so, we simply do a block copy. */
5246 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5248 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5249 || bitpos % BITS_PER_UNIT != 0)
5250 abort ();
5252 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5253 emit_block_move (target, temp,
5254 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5255 / BITS_PER_UNIT),
5256 BLOCK_OP_NORMAL);
5258 return value_mode == VOIDmode ? const0_rtx : target;
5261 /* Store the value in the bitfield. */
5262 store_bit_field (target, bitsize, bitpos, mode, temp,
5263 int_size_in_bytes (type));
5265 if (value_mode != VOIDmode)
5267 /* The caller wants an rtx for the value.
5268 If possible, avoid refetching from the bitfield itself. */
5269 if (width_mask != 0
5270 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5272 tree count;
5273 enum machine_mode tmode;
5275 tmode = GET_MODE (temp);
5276 if (tmode == VOIDmode)
5277 tmode = value_mode;
5279 if (unsignedp)
5280 return expand_and (tmode, temp,
5281 gen_int_mode (width_mask, tmode),
5282 NULL_RTX);
5284 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5285 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5286 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5289 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5290 NULL_RTX, value_mode, VOIDmode,
5291 int_size_in_bytes (type));
5292 }
5293 return const0_rtx;
5294 }
5295 else
5297 rtx addr = XEXP (target, 0);
5298 rtx to_rtx = target;
5300 /* If a value is wanted, it must be the lhs;
5301 so make the address stable for multiple use. */
5303 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5304 && ! CONSTANT_ADDRESS_P (addr)
5305 /* A frame-pointer reference is already stable. */
5306 && ! (GET_CODE (addr) == PLUS
5307 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5308 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5309 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5310 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5312 /* Now build a reference to just the desired component. */
5314 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5316 if (to_rtx == target)
5317 to_rtx = copy_rtx (to_rtx);
5319 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5320 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5321 set_mem_alias_set (to_rtx, alias_set);
5323 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5324 }
5325 }
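/* Illustrative sketch (not part of GCC) of the mask-and-merge bit-field
   store that store_bit_field performs for the bit-field cases above;
   width_mask mirrors the ((1 << bitsize) - 1) value computed at the top of
   store_field.  Assumes bitpos + bitsize <= 32 and bitsize > 0.  */
#if 0
static unsigned int
store_bits_sketch (unsigned int word, unsigned int value,
                   unsigned int bitpos, unsigned int bitsize)
{
  unsigned int width_mask = (bitsize < 32 ? (1u << bitsize) - 1u : ~0u);
  unsigned int field_mask = width_mask << bitpos;
  return (word & ~field_mask) | ((value << bitpos) & field_mask);
}
#endif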
5327 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5328 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5329 codes and find the ultimate containing object, which we return.
5331 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5332 bit position, and *PUNSIGNEDP to the signedness of the field.
5333 If the position of the field is variable, we store a tree
5334 giving the variable offset (in units) in *POFFSET.
5335 This offset is in addition to the bit position.
5336 If the position is not variable, we store 0 in *POFFSET.
5338 If any of the extraction expressions is volatile,
5339 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5341 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5342 is a mode that can be used to access the field. In that case, *PBITSIZE
5343 is redundant.
5345 If the field describes a variable-sized object, *PMODE is set to
5346 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5347 this case, but the address of the object can be found. */
5349 tree
5350 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5351 HOST_WIDE_INT *pbitpos, tree *poffset,
5352 enum machine_mode *pmode, int *punsignedp,
5353 int *pvolatilep)
5355 tree size_tree = 0;
5356 enum machine_mode mode = VOIDmode;
5357 tree offset = size_zero_node;
5358 tree bit_offset = bitsize_zero_node;
5359 tree placeholder_ptr = 0;
5360 tree tem;
5362 /* First get the mode, signedness, and size. We do this from just the
5363 outermost expression. */
5364 if (TREE_CODE (exp) == COMPONENT_REF)
5366 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5367 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5368 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5370 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5372 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5374 size_tree = TREE_OPERAND (exp, 1);
5375 *punsignedp = TREE_UNSIGNED (exp);
5377 else
5379 mode = TYPE_MODE (TREE_TYPE (exp));
5380 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5382 if (mode == BLKmode)
5383 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5384 else
5385 *pbitsize = GET_MODE_BITSIZE (mode);
5388 if (size_tree != 0)
5390 if (! host_integerp (size_tree, 1))
5391 mode = BLKmode, *pbitsize = -1;
5392 else
5393 *pbitsize = tree_low_cst (size_tree, 1);
5396 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5397 and find the ultimate containing object. */
5398 while (1)
5400 if (TREE_CODE (exp) == BIT_FIELD_REF)
5401 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5402 else if (TREE_CODE (exp) == COMPONENT_REF)
5404 tree field = TREE_OPERAND (exp, 1);
5405 tree this_offset = DECL_FIELD_OFFSET (field);
5407 /* If this field hasn't been filled in yet, don't go
5408 past it. This should only happen when folding expressions
5409 made during type construction. */
5410 if (this_offset == 0)
5411 break;
5412 else if (CONTAINS_PLACEHOLDER_P (this_offset))
5413 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5415 offset = size_binop (PLUS_EXPR, offset, this_offset);
5416 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5417 DECL_FIELD_BIT_OFFSET (field));
5419 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5422 else if (TREE_CODE (exp) == ARRAY_REF
5423 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5425 tree index = TREE_OPERAND (exp, 1);
5426 tree array = TREE_OPERAND (exp, 0);
5427 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5428 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5429 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5431 /* We assume all arrays have sizes that are a multiple of a byte.
5432 First subtract the lower bound, if any, in the type of the
5433 index, then convert to sizetype and multiply by the size of the
5434 array element. */
5435 if (low_bound != 0 && ! integer_zerop (low_bound))
5436 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5437 index, low_bound));
5439 /* If the index has a self-referential type, pass it to a
5440 WITH_RECORD_EXPR; if the component size does, pass our
5441 component to one. */
5442 if (CONTAINS_PLACEHOLDER_P (index))
5443 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5444 if (CONTAINS_PLACEHOLDER_P (unit_size))
5445 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5447 offset = size_binop (PLUS_EXPR, offset,
5448 size_binop (MULT_EXPR,
5449 convert (sizetype, index),
5450 unit_size));
5453 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5455 tree new = find_placeholder (exp, &placeholder_ptr);
5457 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5458 We might have been called from tree optimization where we
5459 haven't set up an object yet. */
5460 if (new == 0)
5461 break;
5462 else
5463 exp = new;
5465 continue;
5468 /* We can go inside most conversions: all NON_VALUE_EXPRs, all normal
5469 conversions that don't change the mode, and all view conversions
5470 except those that need to "step up" the alignment. */
5471 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5472 && ! (TREE_CODE (exp) == VIEW_CONVERT_EXPR
5473 && ! ((TYPE_ALIGN (TREE_TYPE (exp))
5474 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5476 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5477 < BIGGEST_ALIGNMENT)
5478 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5479 || TYPE_ALIGN_OK (TREE_TYPE
5480 (TREE_OPERAND (exp, 0))))))
5481 && ! ((TREE_CODE (exp) == NOP_EXPR
5482 || TREE_CODE (exp) == CONVERT_EXPR)
5483 && (TYPE_MODE (TREE_TYPE (exp))
5484 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5485 break;
5487 /* If any reference in the chain is volatile, the effect is volatile. */
5488 if (TREE_THIS_VOLATILE (exp))
5489 *pvolatilep = 1;
5491 exp = TREE_OPERAND (exp, 0);
5494 /* If OFFSET is constant, see if we can return the whole thing as a
5495 constant bit position. Otherwise, split it up. */
5496 if (host_integerp (offset, 0)
5497 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5498 bitsize_unit_node))
5499 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5500 && host_integerp (tem, 0))
5501 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5502 else
5503 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5505 return exp;
5506 }
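/* Illustrative sketch (not part of GCC): for a constant-position reference
   such as s.in.field, get_inner_reference boils down to "containing object
   plus bit position".  The plain-C analogue of that bit position can be
   written with offsetof (struct names here are hypothetical).  */
#if 0
#include <stddef.h>

struct inner_s { int field; };
struct outer_s { char pad; struct inner_s in; };

static size_t
bit_position_sketch (void)
{
  /* Byte offset of in.field within struct outer_s, scaled to bits
     (assuming 8-bit bytes).  */
  return (offsetof (struct outer_s, in)
          + offsetof (struct inner_s, field)) * 8;
}
#endif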
5509 /* Return 1 if T is an expression that get_inner_reference handles. */
5511 int
5512 handled_component_p (tree t)
5514 switch (TREE_CODE (t))
5516 case BIT_FIELD_REF:
5517 case COMPONENT_REF:
5518 case ARRAY_REF:
5519 case ARRAY_RANGE_REF:
5520 case NON_LVALUE_EXPR:
5521 case VIEW_CONVERT_EXPR:
5522 return 1;
5524 /* ??? Sure they are handled, but get_inner_reference may return
5525 a different PBITSIZE, depending upon whether the expression is
5526 wrapped up in a NOP_EXPR or not, e.g. for bitfields. */
5527 case NOP_EXPR:
5528 case CONVERT_EXPR:
5529 return (TYPE_MODE (TREE_TYPE (t))
5530 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5532 default:
5533 return 0;
5534 }
5535 }
5537 /* Given an rtx VALUE that may contain additions and multiplications, return
5538 an equivalent value that just refers to a register, memory, or constant.
5539 This is done by generating instructions to perform the arithmetic and
5540 returning a pseudo-register containing the value.
5542 The returned value may be a REG, SUBREG, MEM or constant. */
5544 rtx
5545 force_operand (rtx value, rtx target)
5547 rtx op1, op2;
5548 /* Use subtarget as the target for operand 0 of a binary operation. */
5549 rtx subtarget = get_subtarget (target);
5550 enum rtx_code code = GET_CODE (value);
5552 /* Check for a PIC address load. */
5553 if ((code == PLUS || code == MINUS)
5554 && XEXP (value, 0) == pic_offset_table_rtx
5555 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5556 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5557 || GET_CODE (XEXP (value, 1)) == CONST))
5559 if (!subtarget)
5560 subtarget = gen_reg_rtx (GET_MODE (value));
5561 emit_move_insn (subtarget, value);
5562 return subtarget;
5565 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5567 if (!target)
5568 target = gen_reg_rtx (GET_MODE (value));
5569 convert_move (target, force_operand (XEXP (value, 0), NULL),
5570 code == ZERO_EXTEND);
5571 return target;
5574 if (GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
5576 op2 = XEXP (value, 1);
5577 if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
5578 subtarget = 0;
5579 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5581 code = PLUS;
5582 op2 = negate_rtx (GET_MODE (value), op2);
5585 /* Check for an addition with OP2 a constant integer and our first
5586 operand a PLUS of a virtual register and something else. In that
5587 case, we want to emit the sum of the virtual register and the
5588 constant first and then add the other value. This allows virtual
5589 register instantiation to simply modify the constant rather than
5590 creating another one around this addition. */
5591 if (code == PLUS && GET_CODE (op2) == CONST_INT
5592 && GET_CODE (XEXP (value, 0)) == PLUS
5593 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5594 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5595 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5597 rtx temp = expand_simple_binop (GET_MODE (value), code,
5598 XEXP (XEXP (value, 0), 0), op2,
5599 subtarget, 0, OPTAB_LIB_WIDEN);
5600 return expand_simple_binop (GET_MODE (value), code, temp,
5601 force_operand (XEXP (XEXP (value,
5602 0), 1), NULL_RTX),
5603 target, 0, OPTAB_LIB_WIDEN);
5606 op1 = force_operand (XEXP (value, 0), subtarget);
5607 op2 = force_operand (op2, NULL_RTX);
5608 switch (code)
5610 case MULT:
5611 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5612 case DIV:
5613 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5614 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5615 target, 1, OPTAB_LIB_WIDEN);
5616 else
5617 return expand_divmod (0,
5618 FLOAT_MODE_P (GET_MODE (value))
5619 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5620 GET_MODE (value), op1, op2, target, 0);
5622 case MOD:
5623 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5624 target, 0);
5626 case UDIV:
5627 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5628 target, 1);
5630 case UMOD:
5631 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5632 target, 1);
5634 case ASHIFTRT:
5635 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5636 target, 0, OPTAB_LIB_WIDEN);
5638 default:
5639 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5640 target, 1, OPTAB_LIB_WIDEN);
5643 if (GET_RTX_CLASS (code) == '1')
5645 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5646 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5649 #ifdef INSN_SCHEDULING
5650 /* On machines that have insn scheduling, we want all memory reference to be
5651 explicit, so we need to deal with such paradoxical SUBREGs. */
5652 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
5653 && (GET_MODE_SIZE (GET_MODE (value))
5654 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5655 value
5656 = simplify_gen_subreg (GET_MODE (value),
5657 force_reg (GET_MODE (SUBREG_REG (value)),
5658 force_operand (SUBREG_REG (value),
5659 NULL_RTX)),
5660 GET_MODE (SUBREG_REG (value)),
5661 SUBREG_BYTE (value));
5662 #endif
5664 return value;
5665 }
5667 /* Subroutine of expand_expr: return nonzero iff there is no way that
5668 EXP can reference X, which is being modified. TOP_P is nonzero if this
5669 call is going to be used to determine whether we need a temporary
5670 for EXP, as opposed to a recursive call to this function.
5672 It is always safe for this routine to return zero since it merely
5673 searches for optimization opportunities. */
5675 static int
5676 safe_from_p (rtx x, tree exp, int top_p)
5678 rtx exp_rtl = 0;
5679 int i, nops;
5680 static tree save_expr_list;
5682 if (x == 0
5683 /* If EXP has varying size, we MUST use a target since we currently
5684 have no way of allocating temporaries of variable size
5685 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5686 So we assume here that something at a higher level has prevented a
5687 clash. This is somewhat bogus, but the best we can do. Only
5688 do this when X is BLKmode and when we are at the top level. */
5689 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5690 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5691 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5692 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5693 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5694 != INTEGER_CST)
5695 && GET_MODE (x) == BLKmode)
5696 /* If X is in the outgoing argument area, it is always safe. */
5697 || (GET_CODE (x) == MEM
5698 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5699 || (GET_CODE (XEXP (x, 0)) == PLUS
5700 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5701 return 1;
5703 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5704 find the underlying pseudo. */
5705 if (GET_CODE (x) == SUBREG)
5707 x = SUBREG_REG (x);
5708 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5709 return 0;
5712 /* A SAVE_EXPR might appear many times in the expression passed to the
5713 top-level safe_from_p call, and if it has a complex subexpression,
5714 examining it multiple times could result in a combinatorial explosion.
5715 E.g. on an Alpha running at least 200MHz, a Fortran testcase compiled
5716 with optimization took about 28 minutes to compile -- even though it was
5717 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5718 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5719 we have processed. Note that the only test of top_p was above. */
5721 if (top_p)
5723 int rtn;
5724 tree t;
5726 save_expr_list = 0;
5728 rtn = safe_from_p (x, exp, 0);
5730 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5731 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5733 return rtn;
5736 /* Now look at our tree code and possibly recurse. */
5737 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5739 case 'd':
5740 exp_rtl = DECL_RTL_IF_SET (exp);
5741 break;
5743 case 'c':
5744 return 1;
5746 case 'x':
5747 if (TREE_CODE (exp) == TREE_LIST)
5749 while (1)
5751 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
5752 return 0;
5753 exp = TREE_CHAIN (exp);
5754 if (!exp)
5755 return 1;
5756 if (TREE_CODE (exp) != TREE_LIST)
5757 return safe_from_p (x, exp, 0);
5760 else if (TREE_CODE (exp) == ERROR_MARK)
5761 return 1; /* An already-visited SAVE_EXPR? */
5762 else
5763 return 0;
5765 case '2':
5766 case '<':
5767 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
5768 return 0;
5770 /* Fall through. */
5771 case '1':
5772 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5774 case 'e':
5775 case 'r':
5776 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5777 the expression. If it is set, we conflict iff we are that rtx or
5778 both are in memory. Otherwise, we check all operands of the
5779 expression recursively. */
5781 switch (TREE_CODE (exp))
5783 case ADDR_EXPR:
5784 /* If the operand is static or we are static, we can't conflict.
5785 Likewise if we don't conflict with the operand at all. */
5786 if (staticp (TREE_OPERAND (exp, 0))
5787 || TREE_STATIC (exp)
5788 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5789 return 1;
5791 /* Otherwise, the only way this can conflict is if we are taking
5792 the address of a DECL whose address is part of X, which is
5793 very common. */
5794 exp = TREE_OPERAND (exp, 0);
5795 if (DECL_P (exp))
5797 if (!DECL_RTL_SET_P (exp)
5798 || GET_CODE (DECL_RTL (exp)) != MEM)
5799 return 0;
5800 else
5801 exp_rtl = XEXP (DECL_RTL (exp), 0);
5803 break;
5805 case INDIRECT_REF:
5806 if (GET_CODE (x) == MEM
5807 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5808 get_alias_set (exp)))
5809 return 0;
5810 break;
5812 case CALL_EXPR:
5813 /* Assume that the call will clobber all hard registers and
5814 all of memory. */
5815 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5816 || GET_CODE (x) == MEM)
5817 return 0;
5818 break;
5820 case RTL_EXPR:
5821 /* If a sequence exists, we would have to scan every instruction
5822 in the sequence to see if it was safe. This is probably not
5823 worthwhile. */
5824 if (RTL_EXPR_SEQUENCE (exp))
5825 return 0;
5827 exp_rtl = RTL_EXPR_RTL (exp);
5828 break;
5830 case WITH_CLEANUP_EXPR:
5831 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
5832 break;
5834 case CLEANUP_POINT_EXPR:
5835 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5837 case SAVE_EXPR:
5838 exp_rtl = SAVE_EXPR_RTL (exp);
5839 if (exp_rtl)
5840 break;
5842 /* If we've already scanned this, don't do it again. Otherwise,
5843 show we've scanned it and record for clearing the flag if we're
5844 going on. */
5845 if (TREE_PRIVATE (exp))
5846 return 1;
5848 TREE_PRIVATE (exp) = 1;
5849 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5851 TREE_PRIVATE (exp) = 0;
5852 return 0;
5855 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
5856 return 1;
5858 case BIND_EXPR:
5859 /* The only operand we look at is operand 1. The rest aren't
5860 part of the expression. */
5861 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5863 default:
5864 break;
5867 /* If we have an rtx, we do not need to scan our operands. */
5868 if (exp_rtl)
5869 break;
5871 nops = first_rtl_op (TREE_CODE (exp));
5872 for (i = 0; i < nops; i++)
5873 if (TREE_OPERAND (exp, i) != 0
5874 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5875 return 0;
5877 /* If this is a language-specific tree code, it may require
5878 special handling. */
5879 if ((unsigned int) TREE_CODE (exp)
5880 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5881 && !(*lang_hooks.safe_from_p) (x, exp))
5882 return 0;
5885 /* If we have an rtl, find any enclosed object. Then see if we conflict
5886 with it. */
5887 if (exp_rtl)
5889 if (GET_CODE (exp_rtl) == SUBREG)
5891 exp_rtl = SUBREG_REG (exp_rtl);
5892 if (GET_CODE (exp_rtl) == REG
5893 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5894 return 0;
5897 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5898 are memory and they conflict. */
5899 return ! (rtx_equal_p (x, exp_rtl)
5900 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5901 && true_dependence (exp_rtl, VOIDmode, x,
5902 rtx_addr_varies_p)));
5905 /* If we reach here, it is safe. */
5906 return 1;
5907 }
5909 /* Subroutine of expand_expr: return rtx if EXP is a
5910 variable or parameter; else return 0. */
5912 static rtx
5913 var_rtx (tree exp)
5915 STRIP_NOPS (exp);
5916 switch (TREE_CODE (exp))
5918 case PARM_DECL:
5919 case VAR_DECL:
5920 return DECL_RTL (exp);
5921 default:
5922 return 0;
5923 }
5924 }
5926 #ifdef MAX_INTEGER_COMPUTATION_MODE
5928 void
5929 check_max_integer_computation_mode (tree exp)
5931 enum tree_code code;
5932 enum machine_mode mode;
5934 /* Strip any NOPs that don't change the mode. */
5935 STRIP_NOPS (exp);
5936 code = TREE_CODE (exp);
5938 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5939 if (code == NOP_EXPR
5940 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5941 return;
5943 /* First check the type of the overall operation. We need only look at
5944 unary, binary and relational operations. */
5945 if (TREE_CODE_CLASS (code) == '1'
5946 || TREE_CODE_CLASS (code) == '2'
5947 || TREE_CODE_CLASS (code) == '<')
5949 mode = TYPE_MODE (TREE_TYPE (exp));
5950 if (GET_MODE_CLASS (mode) == MODE_INT
5951 && mode > MAX_INTEGER_COMPUTATION_MODE)
5952 internal_error ("unsupported wide integer operation");
5955 /* Check operand of a unary op. */
5956 if (TREE_CODE_CLASS (code) == '1')
5958 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5959 if (GET_MODE_CLASS (mode) == MODE_INT
5960 && mode > MAX_INTEGER_COMPUTATION_MODE)
5961 internal_error ("unsupported wide integer operation");
5964 /* Check operands of a binary/comparison op. */
5965 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5967 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5968 if (GET_MODE_CLASS (mode) == MODE_INT
5969 && mode > MAX_INTEGER_COMPUTATION_MODE)
5970 internal_error ("unsupported wide integer operation");
5972 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5973 if (GET_MODE_CLASS (mode) == MODE_INT
5974 && mode > MAX_INTEGER_COMPUTATION_MODE)
5975 internal_error ("unsupported wide integer operation");
5976 }
5977 }
5978 #endif
5980 /* Return the highest power of two that EXP is known to be a multiple of.
5981 This is used in updating alignment of MEMs in array references. */
5983 static unsigned HOST_WIDE_INT
5984 highest_pow2_factor (tree exp)
5986 unsigned HOST_WIDE_INT c0, c1;
5988 switch (TREE_CODE (exp))
5990 case INTEGER_CST:
5991 /* We can find the lowest bit that's a one. If the low
5992 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
5993 We need to handle this case since we can find it in a COND_EXPR,
5994 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
5995 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
5996 later ICE. */
5997 if (TREE_CONSTANT_OVERFLOW (exp))
5998 return BIGGEST_ALIGNMENT;
5999 else
6001 /* Note: tree_low_cst is intentionally not used here,
6002 we don't care about the upper bits. */
6003 c0 = TREE_INT_CST_LOW (exp);
6004 c0 &= -c0;
6005 return c0 ? c0 : BIGGEST_ALIGNMENT;
6009 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6010 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6011 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6012 return MIN (c0, c1);
6014 case MULT_EXPR:
6015 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6016 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6017 return MIN (c0 * c1, BIGGEST_ALIGNMENT);
6019 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6020 case CEIL_DIV_EXPR:
6021 if (integer_pow2p (TREE_OPERAND (exp, 1))
6022 && host_integerp (TREE_OPERAND (exp, 1), 1))
6024 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6025 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6026 return MAX (1, c0 / c1);
6030 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6031 case SAVE_EXPR: case WITH_RECORD_EXPR:
6032 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6034 case COMPOUND_EXPR:
6035 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6037 case COND_EXPR:
6038 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6039 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6040 return MIN (c0, c1);
6042 default:
6043 break;
6044 }
6046 return 1;
6047 }
6049 /* Similar, except that it is known that the expression must be a multiple
6050 of the alignment of TYPE. */
6052 static unsigned HOST_WIDE_INT
6053 highest_pow2_factor_for_type (tree type, tree exp)
6055 unsigned HOST_WIDE_INT type_align, factor;
6057 factor = highest_pow2_factor (exp);
6058 type_align = TYPE_ALIGN (type) / BITS_PER_UNIT;
6059 return MAX (factor, type_align);
6060 }
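/* Illustrative sketch (not part of GCC) of the INTEGER_CST case in
   highest_pow2_factor above: c0 & -c0 isolates the lowest set bit, which
   is exactly the largest power of two dividing c0; e.g. 24 (binary 11000)
   yields 8.  */
#if 0
static unsigned long
lowest_set_bit_sketch (unsigned long c0)
{
  return c0 & -c0;   /* well defined: unsigned negation is modular */
}
#endif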
6062 /* Return an object on the placeholder list that matches EXP, a
6063 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
6064 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
6065 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
6066 is a location which initially points to a starting location in the
6067 placeholder list (zero means start of the list) and where a pointer into
6068 the placeholder list at which the object is found is placed. */
tree
find_placeholder (tree exp, tree *plist)
{
  tree type = TREE_TYPE (exp);
  tree placeholder_expr;
6076 for (placeholder_expr
6077 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
6078 placeholder_expr != 0;
6079 placeholder_expr = TREE_CHAIN (placeholder_expr))
    {
      tree need_type = TYPE_MAIN_VARIANT (type);
      tree elt;

      /* Find the outermost reference that is of the type we want.  If none,
	 see if any object has a type that is a pointer to the type we
	 want.  */
      for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6088 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6089 || TREE_CODE (elt) == COND_EXPR)
6090 ? TREE_OPERAND (elt, 1)
6091 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6092 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6093 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6094 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6095 ? TREE_OPERAND (elt, 0) : 0))
	if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
	  {
	    if (plist)
	      *plist = placeholder_expr;
	    return elt;
	  }
      for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
	   elt
	   = ((TREE_CODE (elt) == COMPOUND_EXPR
6106 || TREE_CODE (elt) == COND_EXPR)
6107 ? TREE_OPERAND (elt, 1)
6108 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6109 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6110 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6111 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6112 ? TREE_OPERAND (elt, 0) : 0))
6113 if (POINTER_TYPE_P (TREE_TYPE (elt))
	    && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
		== need_type))
	  {
	    if (plist)
	      *plist = placeholder_expr;
	    return build1 (INDIRECT_REF, need_type, elt);
	  }
    }

  return 0;
}
6126 /* Subroutine of expand_expr. Expand the two operands of a binary
6127 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6128 The value may be stored in TARGET if TARGET is nonzero. The
6129 MODIFIER argument is as documented by expand_expr. */
static void
expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
		 enum expand_modifier modifier)
{
  if (! safe_from_p (target, exp1, 1))
    target = 0;
  if (operand_equal_p (exp0, exp1, 0))
    {
      *op0 = expand_expr (exp0, target, VOIDmode, modifier);
      *op1 = copy_rtx (*op0);
    }
  else
    {
      /* If we need to preserve evaluation order, copy exp0 into its own
	 temporary variable so that it can't be clobbered by exp1.  */
      if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
	exp0 = save_expr (exp0);
      *op0 = expand_expr (exp0, target, VOIDmode, modifier);
      *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
    }
}
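/* Usage sketch (an addition): a typical binary-operator case in
   expand_expr below does

       expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
			subtarget, &op0, &op1, 0);

   after which op0 and op1 hold the expanded operands.  The
   operand_equal_p shortcut above means an expression such as x + x
   expands x only once and duplicates the resulting rtx.  */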
6154 /* expand_expr: generate code for computing expression EXP.
6155 An rtx for the computed value is returned. The value is never null.
6156 In the case of a void EXP, const0_rtx is returned.
6158 The value may be stored in TARGET if TARGET is nonzero.
6159 TARGET is just a suggestion; callers must assume that
6160 the rtx returned may not be the same as TARGET.
6162 If TARGET is CONST0_RTX, it means that the value will be ignored.
6164 If TMODE is not VOIDmode, it suggests generating the
6165 result in mode TMODE. But this is done only when convenient.
   Otherwise, TMODE is ignored and the value is generated in its natural mode.
6167 TMODE is just a suggestion; callers must assume that
6168 the rtx returned may not have mode TMODE.
6170 Note that TARGET may have neither TMODE nor MODE. In that case, it
6171 probably will not be used.
6173 If MODIFIER is EXPAND_SUM then when EXP is an addition
6174 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6175 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6176 products as above, or REG or MEM, or constant.
6177 Ordinarily in such cases we would output mul or add instructions
6178 and then return a pseudo reg containing the sum.
6180 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6181 it also marks a label as absolutely required (it can't be dead).
6182 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6183 This is used for outputting expressions used in initializers.
6185 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6186 with a constant address even if that address is not normally legitimate.
6187 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6189 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6190 a call parameter. Such targets require special care as we haven't yet
6191 marked TARGET so that it's safe from being trashed by libcalls. We
6192 don't want to use TARGET for anything but the final result;
   intermediate values must go elsewhere.  Additionally, calls to
6194 emit_block_move will be flagged with BLOCK_OP_CALL_PARM. */
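/* Illustrative note (an addition): an ordinary expression statement is
   typically expanded with no special modifier, e.g.

       temp = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);

   whereas address arithmetic inside a MEM is expanded with EXPAND_SUM,
   so that a form like (plus (reg) (const_int 8)) can be returned as-is
   and folded into an addressing mode by the caller.  */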
rtx
expand_expr (tree exp, rtx target, enum machine_mode tmode,
	     enum expand_modifier modifier)
{
  rtx op0, op1, temp;
6201 tree type = TREE_TYPE (exp);
6202 int unsignedp = TREE_UNSIGNED (type);
6203 enum machine_mode mode;
6204 enum tree_code code = TREE_CODE (exp);
  optab this_optab;
  rtx subtarget, original_target;
  int ignore;
  tree context;
6210 /* Handle ERROR_MARK before anybody tries to access its type. */
6211 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
    {
      op0 = CONST0_RTX (tmode);
      if (op0 != 0)
	return op0;
      return const0_rtx;
    }
6219 mode = TYPE_MODE (type);
6220 /* Use subtarget as the target for operand 0 of a binary operation. */
6221 subtarget = get_subtarget (target);
6222 original_target = target;
6223 ignore = (target == const0_rtx
6224 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6225 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6226 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6227 && TREE_CODE (type) == VOID_TYPE));
6229 /* If we are going to ignore this result, we need only do something
6230 if there is a side-effect somewhere in the expression. If there
6231 is, short-circuit the most common cases here. Note that we must
6232 not call expand_expr with anything but const0_rtx in case this
6233 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
  if (ignore)
    {
      if (! TREE_SIDE_EFFECTS (exp))
	return const0_rtx;
6240 /* Ensure we reference a volatile object even if value is ignored, but
6241 don't do this if all we are doing is taking its address. */
6242 if (TREE_THIS_VOLATILE (exp)
6243 && TREE_CODE (exp) != FUNCTION_DECL
6244 && mode != VOIDmode && mode != BLKmode
6245 && modifier != EXPAND_CONST_ADDRESS)
	{
	  temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
	  if (GET_CODE (temp) == MEM)
	    temp = copy_to_reg (temp);
	  return const0_rtx;
	}
6253 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6254 || code == INDIRECT_REF || code == BUFFER_REF)
	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
			    modifier);
6258 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6259 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
	{
	  expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
	  expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
	  return const0_rtx;
	}
6265 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6266 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
	/* If the second operand has no side effects, just evaluate
	   the first.  */
	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
			    modifier);
      else if (code == BIT_FIELD_REF)
	{
	  expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
	  expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
	  expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
	  return const0_rtx;
	}

      target = 0;
    }
6282 #ifdef MAX_INTEGER_COMPUTATION_MODE
6283 /* Only check stuff here if the mode we want is different from the mode
6284 of the expression; if it's the same, check_max_integer_computation_mode
6285 will handle it. Do we really need to check this stuff at all? */
  if (target
      && GET_MODE (target) != mode
6289 && TREE_CODE (exp) != INTEGER_CST
6290 && TREE_CODE (exp) != PARM_DECL
6291 && TREE_CODE (exp) != ARRAY_REF
6292 && TREE_CODE (exp) != ARRAY_RANGE_REF
6293 && TREE_CODE (exp) != COMPONENT_REF
6294 && TREE_CODE (exp) != BIT_FIELD_REF
6295 && TREE_CODE (exp) != INDIRECT_REF
6296 && TREE_CODE (exp) != CALL_EXPR
6297 && TREE_CODE (exp) != VAR_DECL
6298 && TREE_CODE (exp) != RTL_EXPR)
6300 enum machine_mode mode = GET_MODE (target);
6302 if (GET_MODE_CLASS (mode) == MODE_INT
6303 && mode > MAX_INTEGER_COMPUTATION_MODE)
	internal_error ("unsupported wide integer operation");
    }

  if (tmode != mode
      && TREE_CODE (exp) != INTEGER_CST
6309 && TREE_CODE (exp) != PARM_DECL
6310 && TREE_CODE (exp) != ARRAY_REF
6311 && TREE_CODE (exp) != ARRAY_RANGE_REF
6312 && TREE_CODE (exp) != COMPONENT_REF
6313 && TREE_CODE (exp) != BIT_FIELD_REF
6314 && TREE_CODE (exp) != INDIRECT_REF
6315 && TREE_CODE (exp) != VAR_DECL
6316 && TREE_CODE (exp) != CALL_EXPR
6317 && TREE_CODE (exp) != RTL_EXPR
6318 && GET_MODE_CLASS (tmode) == MODE_INT
6319 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6320 internal_error ("unsupported wide integer operation");
  check_max_integer_computation_mode (exp);
#endif
6325 /* If will do cse, generate all results into pseudo registers
6326 since 1) that allows cse to find more things
6327 and 2) otherwise cse could produce an insn the machine
6328 cannot support. An exception is a CONSTRUCTOR into a multi-word
6329 MEM: that's much more likely to be most efficient into the MEM.
6330 Another is a CALL_EXPR which must return in memory. */
6332 if (! cse_not_expected && mode != BLKmode && target
6333 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6334 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
      && ! (code == CALL_EXPR && aggregate_value_p (exp, exp)))
    target = 0;

  switch (code)
    {
    case LABEL_DECL:
      {
	tree function = decl_function_context (exp);
	/* Labels in containing functions, or labels used from initializers,
	   must be forced.  */
	if (modifier == EXPAND_INITIALIZER
	    || (function != current_function_decl
		&& function != inline_function_decl
		&& function != 0))
	  temp = force_label_rtx (exp);
	else
	  temp = label_rtx (exp);
6353 temp = gen_rtx_MEM (FUNCTION_MODE, gen_rtx_LABEL_REF (Pmode, temp));
6354 if (function != current_function_decl
6355 && function != inline_function_decl && function != 0)
	  LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;

	return temp;
      }

    case PARM_DECL:
      if (!DECL_RTL_SET_P (exp))
	{
	  error ("%Jprior parameter's size depends on '%D'", exp, exp);
	  return CONST0_RTX (mode);
	}
6367 /* ... fall through ... */
    case VAR_DECL:
      /* If a static var's type was incomplete when the decl was written,
6371 but the type is complete now, lay out the decl now. */
6372 if (DECL_SIZE (exp) == 0
6373 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6374 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6375 layout_decl (exp, 0);
6377 /* ... fall through ... */
    case FUNCTION_DECL:
    case RESULT_DECL:
      if (DECL_RTL (exp) == 0)
	abort ();
      /* Ensure variable marked as used even if it doesn't go through
	 a parser.  If it hasn't been used yet, write out an external
	 definition.  */
      if (! TREE_USED (exp))
	{
	  assemble_external (exp);
	  TREE_USED (exp) = 1;
	}
      /* Show we haven't gotten RTL for this yet.  */
      temp = 0;
6396 /* Handle variables inherited from containing functions. */
6397 context = decl_function_context (exp);
6399 /* We treat inline_function_decl as an alias for the current function
6400 because that is the inline function whose vars, types, etc.
6401 are being merged into the current function.
6402 See expand_inline_function. */
6404 if (context != 0 && context != current_function_decl
6405 && context != inline_function_decl
6406 /* If var is static, we don't need a static chain to access it. */
6407 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6408 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
	{
	  rtx addr;

	  /* Mark as non-local and addressable.  */
	  DECL_NONLOCAL (exp) = 1;
	  if (DECL_NO_STATIC_CHAIN (current_function_decl))
	    abort ();
	  (*lang_hooks.mark_addressable) (exp);
	  if (GET_CODE (DECL_RTL (exp)) != MEM)
	    abort ();
	  addr = XEXP (DECL_RTL (exp), 0);
	  if (GET_CODE (addr) == MEM)
	    addr
	      = replace_equiv_address (addr,
				       fix_lexical_addr (XEXP (addr, 0), exp));
	  else
	    addr = fix_lexical_addr (addr, exp);

	  temp = replace_equiv_address (DECL_RTL (exp), addr);
	}
      /* This is the case of an array whose size is to be determined
	 from its initializer, while the initializer is still being parsed.
	 See expand_decl.  */
6434 else if (GET_CODE (DECL_RTL (exp)) == MEM
6435 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6436 temp = validize_mem (DECL_RTL (exp));
6438 /* If DECL_RTL is memory, we are in the normal case and either
6439 the address is not valid or it is not a register and -fforce-addr
6440 is specified, get the address into a register. */
6442 else if (GET_CODE (DECL_RTL (exp)) == MEM
6443 && modifier != EXPAND_CONST_ADDRESS
6444 && modifier != EXPAND_SUM
6445 && modifier != EXPAND_INITIALIZER
6446 && (! memory_address_p (DECL_MODE (exp),
6447 XEXP (DECL_RTL (exp), 0))
6449 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6450 temp = replace_equiv_address (DECL_RTL (exp),
6451 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6453 /* If we got something, return it. But first, set the alignment
6454 if the address is a register. */
      if (temp != 0)
	{
	  if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
	    mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));

	  return temp;
	}
6463 /* If the mode of DECL_RTL does not match that of the decl, it
6464 must be a promoted value. We return a SUBREG of the wanted mode,
6465 but mark it so that we know that it was already extended. */
6467 if (GET_CODE (DECL_RTL (exp)) == REG
6468 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
	{
	  /* Get the signedness used for this variable.  Ensure we get the
6471 same mode we got when the variable was declared. */
6472 if (GET_MODE (DECL_RTL (exp))
6473 != promote_mode (type, DECL_MODE (exp), &unsignedp,
			      (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
	    abort ();
6477 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6478 SUBREG_PROMOTED_VAR_P (temp) = 1;
	  SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
	  return temp;
	}

      return DECL_RTL (exp);
    case INTEGER_CST:
      temp = immed_double_const (TREE_INT_CST_LOW (exp),
6487 TREE_INT_CST_HIGH (exp), mode);
6489 /* ??? If overflow is set, fold will have done an incomplete job,
6490 which can result in (plus xx (const_int 0)), which can get
6491 simplified by validate_replace_rtx during virtual register
6492 instantiation, which can result in unrecognizable insns.
6493 Avoid this by forcing all overflows into registers. */
6494 if (TREE_CONSTANT_OVERFLOW (exp)
6495 && modifier != EXPAND_INITIALIZER)
	temp = force_reg (mode, temp);

      return temp;

    case VECTOR_CST:
      return const_vector_from_tree (exp);

    case CONST_DECL:
      return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
    case REAL_CST:
      /* If optimized, generate immediate CONST_DOUBLE
6508 which will be turned into memory by reload if necessary.
6510 We used to force a register so that loop.c could see it. But
6511 this does not allow gen_* patterns to perform optimizations with
6512 the constants. It also produces two insns in cases like "x = 1.0;".
6513 On most machines, floating-point constants are not permitted in
6514 many insns, so we'd end up copying it to a register in any case.
6516 Now, we do the copying in expand_binop, if appropriate. */
6517 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6518 TYPE_MODE (TREE_TYPE (exp)));
    case COMPLEX_CST:
      /* Handle evaluating a complex constant in a CONCAT target.  */
6522 if (original_target && GET_CODE (original_target) == CONCAT)
	{
	  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
	  rtx rtarg, itarg;

	  rtarg = XEXP (original_target, 0);
	  itarg = XEXP (original_target, 1);

	  /* Move the real and imaginary parts separately.  */
	  op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
	  op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);

	  if (op0 != rtarg)
	    emit_move_insn (rtarg, op0);
	  if (op1 != itarg)
	    emit_move_insn (itarg, op1);

	  return original_target;
	}
6542 /* ... fall through ... */
    case STRING_CST:
      temp = output_constant_def (exp, 1);
6547 /* temp contains a constant address.
6548 On RISC machines where a constant address isn't valid,
6549 make some insns to get that address into a register. */
6550 if (modifier != EXPAND_CONST_ADDRESS
6551 && modifier != EXPAND_INITIALIZER
6552 && modifier != EXPAND_SUM
6553 && (! memory_address_p (mode, XEXP (temp, 0))
6554 || flag_force_addr))
6555 return replace_equiv_address (temp,
				      copy_rtx (XEXP (temp, 0)));
      return temp;
6559 case EXPR_WITH_FILE_LOCATION:
      {
	rtx to_return;
	struct file_stack fs;
6564 fs.location = input_location;
6565 fs.next = expr_wfl_stack;
6566 input_filename = EXPR_WFL_FILENAME (exp);
6567 input_line = EXPR_WFL_LINENO (exp);
6568 expr_wfl_stack = &fs;
6569 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6570 emit_line_note (input_location);
6571 /* Possibly avoid switching back and forth here. */
6572 to_return = expand_expr (EXPR_WFL_NODE (exp),
				 (ignore ? const0_rtx : target),
				 tmode, modifier);
	if (expr_wfl_stack != &fs)
	  abort ();
	input_location = fs.location;
	expr_wfl_stack = fs.next;
	return to_return;
      }
    case SAVE_EXPR:
      context = decl_function_context (exp);
6585 /* If this SAVE_EXPR was at global context, assume we are an
6586 initialization function and move it into our context. */
      if (context == 0)
	SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6590 /* We treat inline_function_decl as an alias for the current function
6591 because that is the inline function whose vars, types, etc.
6592 are being merged into the current function.
6593 See expand_inline_function. */
      if (context == current_function_decl || context == inline_function_decl)
	context = 0;

      /* If this is non-local, handle it.  */
      if (context)
	{
6600 /* The following call just exists to abort if the context is
6601 not of a containing function. */
6602 find_function_data (context);
6604 temp = SAVE_EXPR_RTL (exp);
6605 if (temp && GET_CODE (temp) == REG)
	    {
	      put_var_into_stack (exp, /*rescan=*/true);
	      temp = SAVE_EXPR_RTL (exp);
	    }
	  if (temp == 0 || GET_CODE (temp) != MEM)
	    abort ();
	  SAVE_EXPR_RTL (exp)
	    = replace_equiv_address (temp,
				     fix_lexical_addr (XEXP (temp, 0), exp));
	}
6616 if (SAVE_EXPR_RTL (exp) == 0)
	{
	  if (mode == VOIDmode)
	    temp = const0_rtx;
	  else
	    temp = assign_temp (build_qualified_type (type,
						      (TYPE_QUALS (type)
						       | TYPE_QUAL_CONST)),
				3, 0, 0);
6626 SAVE_EXPR_RTL (exp) = temp;
6627 if (!optimize && GET_CODE (temp) == REG)
	    save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
						save_expr_regs);
	  /* If the mode of TEMP does not match that of the expression, it
	     must be a promoted value.  We pass store_expr a SUBREG of the
	     wanted mode but mark it so that we know that it was already
	     extended.  */
6636 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
	    {
	      temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6639 promote_mode (type, mode, &unsignedp, 0);
6640 SUBREG_PROMOTED_VAR_P (temp) = 1;
	      SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
	    }
6644 if (temp == const0_rtx)
	    expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
	  else
	    store_expr (TREE_OPERAND (exp, 0), temp,
6648 modifier == EXPAND_STACK_PARM ? 2 : 0);
	  TREE_USED (exp) = 1;
	}
6653 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6654 must be a promoted value. We return a SUBREG of the wanted mode,
6655 but mark it so that we know that it was already extended. */
6657 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6658 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
	{
	  /* Compute the signedness and make the proper SUBREG.  */
6661 promote_mode (type, mode, &unsignedp, 0);
6662 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6663 SUBREG_PROMOTED_VAR_P (temp) = 1;
	  SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
	  return temp;
	}

      return SAVE_EXPR_RTL (exp);
    case UNSAVE_EXPR:
      {
	temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
	TREE_OPERAND (exp, 0)
	  = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
	return temp;
      }
6679 case PLACEHOLDER_EXPR:
6681 tree old_list = placeholder_list;
6682 tree placeholder_expr = 0;
	exp = find_placeholder (exp, &placeholder_expr);
	if (exp == 0)
	  abort ();

	placeholder_list = TREE_CHAIN (placeholder_expr);
	temp = expand_expr (exp, original_target, tmode, modifier);
	placeholder_list = old_list;
	return temp;
      }
6694 case WITH_RECORD_EXPR:
6695 /* Put the object on the placeholder list, expand our first operand,
6696 and pop the list. */
      placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
				    placeholder_list);
      target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
			    modifier);
      placeholder_list = TREE_CHAIN (placeholder_list);
      return target;
    case GOTO_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
	expand_goto (TREE_OPERAND (exp, 0));
      else
	expand_computed_goto (TREE_OPERAND (exp, 0));
      return const0_rtx;
    case EXIT_EXPR:
      expand_exit_loop_if_false (NULL,
				 invert_truthvalue (TREE_OPERAND (exp, 0)));
      return const0_rtx;
6716 case LABELED_BLOCK_EXPR:
6717 if (LABELED_BLOCK_BODY (exp))
6718 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
6719 /* Should perhaps use expand_label, but this is simpler and safer. */
6720 do_pending_stack_adjust ();
      emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
      return const0_rtx;
6724 case EXIT_BLOCK_EXPR:
6725 if (EXIT_BLOCK_RETURN (exp))
6726 sorry ("returned value in block_exit_expr");
      expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
      return const0_rtx;
    case LOOP_EXPR:
      push_temp_slots ();
      expand_start_loop (1);
      expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
      expand_end_loop ();
      pop_temp_slots ();

      return const0_rtx;
    case BIND_EXPR:
      {
	tree vars = TREE_OPERAND (exp, 0);
6743 /* Need to open a binding contour here because
6744 if there are any cleanups they must be contained here. */
6745 expand_start_bindings (2);
6747 /* Mark the corresponding BLOCK for output in its proper place. */
6748 if (TREE_OPERAND (exp, 2) != 0
6749 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6750 (*lang_hooks.decls.insert_block) (TREE_OPERAND (exp, 2));
	/* If VARS have not yet been expanded, expand them now.  */
	while (vars)
	  {
	    if (!DECL_RTL_SET_P (vars))
	      expand_decl (vars);
	    expand_decl_init (vars);
	    vars = TREE_CHAIN (vars);
	  }
6761 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
	expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);

	return temp;
      }
    case RTL_EXPR:
      if (RTL_EXPR_SEQUENCE (exp))
	{
	  if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
	    abort ();
	  emit_insn (RTL_EXPR_SEQUENCE (exp));
	  RTL_EXPR_SEQUENCE (exp) = const0_rtx;
	}
6776 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6777 free_temps_for_rtl_expr (exp);
6778 return RTL_EXPR_RTL (exp);
    case CONSTRUCTOR:
      /* If we don't need the result, just ensure we evaluate any
	 subexpressions.  */
      if (ignore)
	{
	  tree elt;

	  for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
	    expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);

	  return const0_rtx;
	}
6793 /* All elts simple constants => refer to a constant in memory. But
6794 if this is a non-BLKmode mode, let it store a field at a time
6795 since that should make a CONST_INT or CONST_DOUBLE when we
6796 fold. Likewise, if we have a target we can use, it is best to
6797 store directly into the target unless the type is large enough
6798 that memcpy will be used. If we are making an initializer and
6799 all operands are constant, put it in memory as well.
6801 FIXME: Avoid trying to fill vector constructors piece-meal.
6802 Output them with output_constant_def below unless we're sure
6803 they're zeros. This should go away when vector initializers
6804 are treated like VECTOR_CST instead of arrays.
6806 else if ((TREE_STATIC (exp)
6807 && ((mode == BLKmode
6808 && ! (target != 0 && safe_from_p (target, exp, 1)))
6809 || TREE_ADDRESSABLE (exp)
6810 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6811 && (! MOVE_BY_PIECES_P
			   (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
			    TYPE_ALIGN (type)))
6814 && ((TREE_CODE (type) == VECTOR_TYPE
6815 && !is_zeros_p (exp))
6816 || ! mostly_zeros_p (exp)))))
6817 || ((modifier == EXPAND_INITIALIZER
6818 || modifier == EXPAND_CONST_ADDRESS)
6819 && TREE_CONSTANT (exp)))
6821 rtx constructor = output_constant_def (exp, 1);
6823 if (modifier != EXPAND_CONST_ADDRESS
6824 && modifier != EXPAND_INITIALIZER
6825 && modifier != EXPAND_SUM)
	    constructor = validize_mem (constructor);

	  return constructor;
	}
      else
	{
	  /* Handle calls that pass values in multiple non-contiguous
6833 locations. The Irix 6 ABI has examples of this. */
6834 if (target == 0 || ! safe_from_p (target, exp, 1)
6835 || GET_CODE (target) == PARALLEL
	      || modifier == EXPAND_STACK_PARM)
	    target
	      = assign_temp (build_qualified_type (type,
						   (TYPE_QUALS (type)
						    | (TREE_READONLY (exp)
						       * TYPE_QUAL_CONST))),
			     0, TREE_ADDRESSABLE (exp), 1);

	  store_constructor (exp, target, 0, int_expr_size (exp));
	  return target;
	}
    case INDIRECT_REF:
      {
	tree exp1 = TREE_OPERAND (exp, 0);
	tree index;
	tree string = string_constant (exp1, &index);
	/* Try to optimize reads from const strings.  */
	if (string
	    && TREE_CODE (string) == STRING_CST
6857 && TREE_CODE (index) == INTEGER_CST
6858 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6859 && GET_MODE_CLASS (mode) == MODE_INT
6860 && GET_MODE_SIZE (mode) == 1
6861 && modifier != EXPAND_WRITE)
6862 return gen_int_mode (TREE_STRING_POINTER (string)
6863 [TREE_INT_CST_LOW (index)], mode);
6865 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6866 op0 = memory_address (mode, op0);
6867 temp = gen_rtx_MEM (mode, op0);
6868 set_mem_attributes (temp, exp, 0);
6870 /* If we are writing to this object and its type is a record with
6871 readonly fields, we must mark it as readonly so it will
6872 conflict with readonly references to those fields. */
6873 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
	  RTX_UNCHANGING_P (temp) = 1;

	return temp;
      }
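	/* Illustrative example (an addition): given
	   const char *s = "abc", a read such as *(s + 1) matches the
	   string_constant test above and is expanded at compile time to
	   (const_int 98), the value of 'b', with no memory reference
	   emitted.  */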
    case ARRAY_REF:
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
	abort ();

      {
	tree array = TREE_OPERAND (exp, 0);
6885 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6886 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6887 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6890 /* Optimize the special-case of a zero lower bound.
6892 We convert the low_bound to sizetype to avoid some problems
6893 with constant folding. (E.g. suppose the lower bound is 1,
6894 and its mode is QI. Without the conversion, (ARRAY
6895 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6896 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6898 if (! integer_zerop (low_bound))
6899 index = size_diffop (index, convert (sizetype, low_bound));
6901 /* Fold an expression like: "foo"[2].
6902 This is not done in fold so it won't happen inside &.
6903 Don't fold if this is for wide characters since it's too
6904 difficult to do correctly and this is a very rare case. */
6906 if (modifier != EXPAND_CONST_ADDRESS
6907 && modifier != EXPAND_INITIALIZER
6908 && modifier != EXPAND_MEMORY
6909 && TREE_CODE (array) == STRING_CST
6910 && TREE_CODE (index) == INTEGER_CST
6911 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6912 && GET_MODE_CLASS (mode) == MODE_INT
6913 && GET_MODE_SIZE (mode) == 1)
6914 return gen_int_mode (TREE_STRING_POINTER (array)
6915 [TREE_INT_CST_LOW (index)], mode);
6917 /* If this is a constant index into a constant array,
6918 just get the value from the array. Handle both the cases when
6919 we have an explicit constructor and when our operand is a variable
6920 that was declared const. */
6922 if (modifier != EXPAND_CONST_ADDRESS
6923 && modifier != EXPAND_INITIALIZER
6924 && modifier != EXPAND_MEMORY
6925 && TREE_CODE (array) == CONSTRUCTOR
6926 && ! TREE_SIDE_EFFECTS (array)
6927 && TREE_CODE (index) == INTEGER_CST
6928 && 0 > compare_tree_int (index,
6929 list_length (CONSTRUCTOR_ELTS
6930 (TREE_OPERAND (exp, 0)))))
	  {
	    tree elem;
	    unsigned HOST_WIDE_INT i;

	    for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
		 i = TREE_INT_CST_LOW (index);
		 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
	      ;

	    if (elem)
	      return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
				  modifier);
	  }
6944 else if (optimize >= 1
6945 && modifier != EXPAND_CONST_ADDRESS
6946 && modifier != EXPAND_INITIALIZER
6947 && modifier != EXPAND_MEMORY
6948 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6949 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6950 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
	  {
	    if (TREE_CODE (index) == INTEGER_CST)
	      {
		tree init = DECL_INITIAL (array);
		if (TREE_CODE (init) == CONSTRUCTOR)
		  {
		    tree elem;

		    for (elem = CONSTRUCTOR_ELTS (init);
			 (elem
			  && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
			 elem = TREE_CHAIN (elem))
		      ;

		    if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
		      return expand_expr (fold (TREE_VALUE (elem)), target,
					  tmode, modifier);
		  }
6970 else if (TREE_CODE (init) == STRING_CST
6971 && 0 > compare_tree_int (index,
6972 TREE_STRING_LENGTH (init)))
		  {
		    tree type = TREE_TYPE (TREE_TYPE (init));
6975 enum machine_mode mode = TYPE_MODE (type);
6977 if (GET_MODE_CLASS (mode) == MODE_INT
6978 && GET_MODE_SIZE (mode) == 1)
6979 return gen_int_mode (TREE_STRING_POINTER (init)
6980 [TREE_INT_CST_LOW (index)], mode);
		  }
	      }
	  }
      }
      goto normal_inner_ref;
    case COMPONENT_REF:
      /* If the operand is a CONSTRUCTOR, we can just extract the
6989 appropriate field if it is present. */
6990 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
	{
	  tree elt;

	  for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6995 elt = TREE_CHAIN (elt))
6996 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6997 /* We can normally use the value of the field in the
6998 CONSTRUCTOR. However, if this is a bitfield in
6999 an integral mode that we can fit in a HOST_WIDE_INT,
7000 we must mask only the number of bits in the bitfield,
7001 since this is done implicitly by the constructor. If
7002 the bitfield does not meet either of those conditions,
7003 we can't do this optimization. */
7004 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
		    || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
			 == MODE_INT)
7007 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
7008 <= HOST_BITS_PER_WIDE_INT))))
	      {
		if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
		    && modifier == EXPAND_STACK_PARM)
		  target = 0;
		op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
7014 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
		  {
		    HOST_WIDE_INT bitsize
7017 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
7018 enum machine_mode imode
7019 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
		    if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
		      {
			op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
			op0 = expand_and (imode, op0, op1, target);
		      }
		    else
		      {
			tree count
			  = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
					 0);

			op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
					    target, 0);
			op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
					    target, 0);
		      }
		  }

		return op0;
	      }
	}

      goto normal_inner_ref;
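      /* Illustrative note (an addition): the shift pair above is the
	 classic sign-extension idiom.  For an 8-bit signed bitfield in
	 SImode whose bits are all ones, shifting left by 24 and then
	 arithmetically right by 24 produces (const_int -1), the value
	 sign-extended from 8 bits; the unsigned branch instead masks
	 with (1 << bitsize) - 1.  */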
    case ARRAY_RANGE_REF:
    normal_inner_ref:
      {
	enum machine_mode mode1;
	HOST_WIDE_INT bitsize, bitpos;
	tree offset;
	int volatilep = 0;
	tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
					&mode1, &unsignedp, &volatilep);
	rtx orig_op0;
7056 /* If we got back the original object, something is wrong. Perhaps
7057 we are evaluating an expression too early. In any event, don't
	   infinitely recurse.  */
	if (tem == exp)
	  abort ();
7062 /* If TEM's type is a union of variable size, pass TARGET to the inner
7063 computation, since it will need a temporary and TARGET is known
7064 to have to do. This occurs in unchecked conversion in Ada. */
	orig_op0 = op0
	  = expand_expr (tem,
			 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
			  && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
			      != INTEGER_CST)
			  && modifier != EXPAND_STACK_PARM
			  ? target : NULL_RTX),
			 VOIDmode,
			 (modifier == EXPAND_INITIALIZER
			  || modifier == EXPAND_CONST_ADDRESS
			  || modifier == EXPAND_STACK_PARM)
			 ? modifier : EXPAND_NORMAL);
7079 /* If this is a constant, put it into a register if it is a
7080 legitimate constant and OFFSET is 0 and memory if it isn't. */
7081 if (CONSTANT_P (op0))
	  {
	    enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));

	    if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
		&& offset == 0)
	      op0 = force_reg (mode, op0);
	    else
	      op0 = validize_mem (force_const_mem (mode, op0));
	  }
	/* Otherwise, if this object is not in memory and we either have an
7092 offset or a BLKmode result, put it there. This case can't occur in
7093 C, but can in Ada if we have unchecked conversion of an expression
7094 from a scalar type to an array or record type or for an
7095 ARRAY_RANGE_REF whose type is BLKmode. */
7096 else if (GET_CODE (op0) != MEM
		 && (offset != 0
		     || (code == ARRAY_RANGE_REF && mode == BLKmode)))
	  {
7100 /* If the operand is a SAVE_EXPR, we can deal with this by
7101 forcing the SAVE_EXPR into memory. */
7102 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
	      {
		put_var_into_stack (TREE_OPERAND (exp, 0),
				    /*rescan=*/true);
		op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
	      }
	    else
	      {
		tree nt
7111 = build_qualified_type (TREE_TYPE (tem),
7112 (TYPE_QUALS (TREE_TYPE (tem))
7113 | TYPE_QUAL_CONST));
7114 rtx memloc = assign_temp (nt, 1, 1, 1);
		emit_move_insn (memloc, op0);
		op0 = memloc;
	      }
	  }
	if (offset != 0)
	  {
	    rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
					  EXPAND_SUM);

	    if (GET_CODE (op0) != MEM)
	      abort ();
7129 #ifdef POINTERS_EXTEND_UNSIGNED
7130 if (GET_MODE (offset_rtx) != Pmode)
7131 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
#else
	    if (GET_MODE (offset_rtx) != ptr_mode)
	      offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif
7137 /* A constant address in OP0 can have VOIDmode, we must not try
7138 to call force_reg for that case. Avoid that case. */
7139 if (GET_CODE (op0) == MEM
7140 && GET_MODE (op0) == BLKmode
7141 && GET_MODE (XEXP (op0, 0)) != VOIDmode
		&& bitsize != 0
		&& (bitpos % bitsize) == 0
7144 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7145 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
	      {
		op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
		bitpos = 0;
	      }

	    op0 = offset_address (op0, offset_rtx,
				  highest_pow2_factor (offset));
	  }
7155 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7156 record its alignment as BIGGEST_ALIGNMENT. */
7157 if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
7158 && is_aligning_offset (offset, tem))
7159 set_mem_align (op0, BIGGEST_ALIGNMENT);
7161 /* Don't forget about volatility even if this is a bitfield. */
7162 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
	  {
	    if (op0 == orig_op0)
	      op0 = copy_rtx (op0);

	    MEM_VOLATILE_P (op0) = 1;
	  }
	/* The following code doesn't handle CONCAT.
	   Assume only bitpos == 0 can be used for CONCAT, due to
	   one-element arrays having the same mode as their element.  */
	if (GET_CODE (op0) == CONCAT)
	  {
	    if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
	      abort ();
	    return op0;
	  }
7180 /* In cases where an aligned union has an unaligned object
7181 as a field, we might be extracting a BLKmode value from
7182 an integer-mode (e.g., SImode) object. Handle this case
7183 by doing the extract into an object as wide as the field
7184 (which we know to be the width of a basic mode), then
7185 storing into memory, and changing the mode to BLKmode. */
7186 if (mode1 == VOIDmode
7187 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7188 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7189 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7190 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7191 && modifier != EXPAND_CONST_ADDRESS
7192 && modifier != EXPAND_INITIALIZER)
7193 /* If the field isn't aligned enough to fetch as a memref,
7194 fetch it as a bit field. */
7195 || (mode1 != BLKmode
7196 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7197 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))
7198 && ((modifier == EXPAND_CONST_ADDRESS
7199 || modifier == EXPAND_INITIALIZER)
			 ? STRICT_ALIGNMENT
			 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7202 || (bitpos % BITS_PER_UNIT != 0)))
7203 /* If the type and the field are a constant size and the
7204 size of the type isn't the same size as the bitfield,
7205 we must use bitfield operations. */
	    || (bitsize >= 0
		&& (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
		    == INTEGER_CST)
		&& 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
					  bitsize)))
	  {
7212 enum machine_mode ext_mode = mode;
7214 if (ext_mode == BLKmode
7215 && ! (target != 0 && GET_CODE (op0) == MEM
7216 && GET_CODE (target) == MEM
7217 && bitpos % BITS_PER_UNIT == 0))
7218 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7220 if (ext_mode == BLKmode)
	      {
		if (target == 0)
		  target = assign_temp (type, 0, 1, 1);
7228 /* In this case, BITPOS must start at a byte boundary and
7229 TARGET, if specified, must be a MEM. */
7230 if (GET_CODE (op0) != MEM
7231 || (target != 0 && GET_CODE (target) != MEM)
		    || bitpos % BITS_PER_UNIT != 0)
		  abort ();
7235 emit_block_move (target,
7236 adjust_address (op0, VOIDmode,
7237 bitpos / BITS_PER_UNIT),
				 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
					  / BITS_PER_UNIT),
				 (modifier == EXPAND_STACK_PARM
				  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));

		return target;
	      }
7246 op0 = validize_mem (op0);
7248 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7249 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7251 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7252 (modifier == EXPAND_STACK_PARM
7253 ? NULL_RTX : target),
				     ext_mode, ext_mode,
				     int_size_in_bytes (TREE_TYPE (tem)));
7257 /* If the result is a record type and BITSIZE is narrower than
7258 the mode of OP0, an integral mode, and this is a big endian
7259 machine, we must put the field into the high-order bits. */
7260 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7261 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7262 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7263 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
				  size_int (GET_MODE_BITSIZE (GET_MODE (op0))
					    - bitsize),
				  op0, 1);
7268 if (mode == BLKmode)
7270 rtx new = assign_temp (build_qualified_type
				       ((*lang_hooks.types.type_for_mode)
					(ext_mode, 0),
					TYPE_QUAL_CONST), 0, 1, 1);
7275 emit_move_insn (new, op0);
7276 op0 = copy_rtx (new);
7277 PUT_MODE (op0, BLKmode);
		set_mem_attributes (op0, exp, 1);
	      }

	    return op0;
	  }
	/* If the result is BLKmode, use that to access the object
	   now as well.  */
	if (mode == BLKmode)
	  mode1 = BLKmode;
7289 /* Get a reference to just this component. */
7290 if (modifier == EXPAND_CONST_ADDRESS
7291 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7292 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7294 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7296 if (op0 == orig_op0)
7297 op0 = copy_rtx (op0);
7299 set_mem_attributes (op0, exp, 0);
7300 if (GET_CODE (XEXP (op0, 0)) == REG)
7301 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7303 MEM_VOLATILE_P (op0) |= volatilep;
7304 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7305 || modifier == EXPAND_CONST_ADDRESS
	    || modifier == EXPAND_INITIALIZER)
	  return op0;
	else if (target == 0)
7309 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
	convert_move (target, op0, unsignedp);
	return target;
      }
    case VTABLE_REF:
      {
	rtx insn, before = get_last_insn (), vtbl_ref;
7319 /* Evaluate the interior expression. */
	subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
				 tmode, modifier);
7323 /* Get or create an instruction off which to hang a note. */
7324 if (REG_P (subtarget))
	  {
	    target = subtarget;
	    insn = get_last_insn ();
	    if (insn == before)
	      abort ();
	    if (! INSN_P (insn))
	      insn = prev_nonnote_insn (insn);
	  }
	else
	  {
	    target = gen_reg_rtx (GET_MODE (subtarget));
	    insn = emit_move_insn (target, subtarget);
	  }
7339 /* Collect the data for the note. */
7340 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7341 vtbl_ref = plus_constant (vtbl_ref,
7342 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7343 /* Discard the initial CONST that was added. */
7344 vtbl_ref = XEXP (vtbl_ref, 0);
	REG_NOTES (insn)
	  = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));

	return target;
      }
7352 /* Intended for a reference to a buffer of a file-object in Pascal.
7353 But it's not certain that a special tree code will really be
	 necessary for these.  INDIRECT_REF might work for them.  */
    case BUFFER_REF:
      abort ();
    case IN_EXPR:
      {
	/* Pascal set IN expression.

	   Algorithm:
	       rlo       = set_low - (set_low%bits_per_word);
	       the_word  = set [ (index - rlo)/bits_per_word ];
	       bit_index = index % bits_per_word;
	       bitmask   = 1 << bit_index;
	       return !!(the_word & bitmask);  */
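	/* Standalone sketch of the same algorithm (an addition, with
	   invented names and byte-sized words for clarity):

	       static int in_set (const unsigned char *set,
				  int set_low, int index)
	       {
		 int rlo = set_low - (set_low % 8);
		 unsigned char word = set[(index - rlo) / 8];
		 return (word >> (index % 8)) & 1;
	       }

	   The RTL emitted below follows the same steps using
	   expand_binop, expand_divmod and expand_shift.  */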
7369 tree set = TREE_OPERAND (exp, 0);
7370 tree index = TREE_OPERAND (exp, 1);
7371 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7372 tree set_type = TREE_TYPE (set);
7373 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7374 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7375 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7376 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7377 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7378 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7379 rtx setaddr = XEXP (setval, 0);
7380 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
	rtx rlow;
	rtx diff, quo, rem, addr, bit, result;
7384 /* If domain is empty, answer is no. Likewise if index is constant
7385 and out of bounds. */
7386 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7387 && TREE_CODE (set_low_bound) == INTEGER_CST
7388 && tree_int_cst_lt (set_high_bound, set_low_bound))
7389 || (TREE_CODE (index) == INTEGER_CST
7390 && TREE_CODE (set_low_bound) == INTEGER_CST
7391 && tree_int_cst_lt (index, set_low_bound))
7392 || (TREE_CODE (set_high_bound) == INTEGER_CST
7393 && TREE_CODE (index) == INTEGER_CST
	      && tree_int_cst_lt (set_high_bound, index))))
	  return const0_rtx;

	if (target == 0)
	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7400 /* If we get here, we have to generate the code for both cases
7401 (in range and out of range). */
7403 op0 = gen_label_rtx ();
7404 op1 = gen_label_rtx ();
7406 if (! (GET_CODE (index_val) == CONST_INT
7407 && GET_CODE (lo_r) == CONST_INT))
7408 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7409 GET_MODE (index_val), iunsignedp, op1);
7411 if (! (GET_CODE (index_val) == CONST_INT
7412 && GET_CODE (hi_r) == CONST_INT))
7413 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7414 GET_MODE (index_val), iunsignedp, op1);
	/* Calculate the element number of bit zero in the first word
	   of the set.  */
	if (GET_CODE (lo_r) == CONST_INT)
7419 rlow = GEN_INT (INTVAL (lo_r)
7420 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
	else
	  rlow = expand_binop (index_mode, and_optab, lo_r,
7423 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7424 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7426 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7427 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7429 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7430 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7431 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7432 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7434 addr = memory_address (byte_mode,
7435 expand_binop (index_mode, add_optab, diff,
					      setaddr, NULL_RTX, iunsignedp,
					      OPTAB_LIB_WIDEN));
7439 /* Extract the bit we want to examine. */
7440 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7441 gen_rtx_MEM (byte_mode, addr),
			    make_tree (TREE_TYPE (index), rem),
			    NULL_RTX, 1);
7444 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7445 GET_MODE (target) == byte_mode ? target : 0,
7446 1, OPTAB_LIB_WIDEN);
7448 if (result != target)
7449 convert_move (target, result, 1);
7451 /* Output the code to handle the out-of-range case. */
	emit_jump (op0);
	emit_label (op1);
	emit_move_insn (target, const0_rtx);
	emit_label (op0);
	return target;
      }
7459 case WITH_CLEANUP_EXPR:
7460 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7462 WITH_CLEANUP_EXPR_RTL (exp)
7463 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7464 expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
7465 CLEANUP_EH_ONLY (exp));
7467 /* That's it for this cleanup. */
	  TREE_OPERAND (exp, 1) = 0;
	}
      return WITH_CLEANUP_EXPR_RTL (exp);
7472 case CLEANUP_POINT_EXPR:
7474 /* Start a new binding layer that will keep track of all cleanup
7475 actions to be performed. */
7476 expand_start_bindings (2);
7478 target_temp_slot_level = temp_slot_level;
7480 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
      /* If we're going to use this value, load it up now.  */
      if (! ignore)
	op0 = force_not_mem (op0);
      preserve_temp_slots (op0);
      expand_end_bindings (NULL_TREE, 0, 0);

      return op0;
    case CALL_EXPR:
      /* Check for a built-in function.  */
7491 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
	      == FUNCTION_DECL)
	  && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	{
7496 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7497 == BUILT_IN_FRONTEND)
	    return (*lang_hooks.expand_expr) (exp, original_target,
					      tmode, modifier);
	  else
	    return expand_builtin (exp, target, subtarget, tmode, ignore);
	}
7504 return expand_call (exp, target, ignore);
7506 case NON_LVALUE_EXPR:
    case NOP_EXPR:
    case CONVERT_EXPR:
    case REFERENCE_EXPR:
      if (TREE_OPERAND (exp, 0) == error_mark_node)
	return const0_rtx;
7513 if (TREE_CODE (type) == UNION_TYPE)
	{
	  tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7517 /* If both input and output are BLKmode, this conversion isn't doing
7518 anything except possibly changing memory attribute. */
7519 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
	    {
	      rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
					modifier);

	      result = copy_rtx (result);
	      set_mem_attributes (result, exp, 0);
	      return result;
	    }

	  if (target == 0)
	    target = assign_temp (type, 0, 1, 1);
7532 if (GET_CODE (target) == MEM)
7533 /* Store data into beginning of memory target. */
7534 store_expr (TREE_OPERAND (exp, 0),
7535 adjust_address (target, TYPE_MODE (valtype), 0),
7536 modifier == EXPAND_STACK_PARM ? 2 : 0);
7538 else if (GET_CODE (target) == REG)
7539 /* Store this field into a union of the proper type. */
7540 store_field (target,
7541 MIN ((int_size_in_bytes (TREE_TYPE
7542 (TREE_OPERAND (exp, 0)))
			       * BITS_PER_UNIT),
			      (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7545 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
			 VOIDmode, 0, type, 0);
	  else
	    abort ();
	  /* Return the entire union.  */
	  return target;
	}
7554 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	  op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
			     modifier);
7559 /* If the signedness of the conversion differs and OP0 is
7560 a promoted SUBREG, clear that indication since we now
7561 have to do the proper extension. */
7562 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7563 && GET_CODE (op0) == SUBREG)
	    SUBREG_PROMOTED_VAR_P (op0) = 0;

	  return op0;
	}
7569 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
      if (GET_MODE (op0) == mode)
	return op0;
7573 /* If OP0 is a constant, just convert it into the proper mode. */
7574 if (CONSTANT_P (op0))
	{
	  tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7577 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7579 if (modifier == EXPAND_INITIALIZER)
7580 return simplify_gen_subreg (mode, op0, inner_mode,
					subreg_lowpart_offset (mode,
							       inner_mode));
	  else
	    return convert_modes (mode, inner_mode, op0,
				  TREE_UNSIGNED (inner_type));
	}
7588 if (modifier == EXPAND_INITIALIZER)
7589 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
      if (target == 0)
	return
	  convert_to_mode (mode, op0,
			   TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      else
	convert_move (target, op0,
		      TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;
7600 case VIEW_CONVERT_EXPR:
7601 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7603 /* If the input and output modes are both the same, we are done.
7604 Otherwise, if neither mode is BLKmode and both are integral and within
7605 a word, we can use gen_lowpart. If neither is true, make sure the
7606 operand is in memory and convert the MEM to the new mode. */
      if (TYPE_MODE (type) == GET_MODE (op0))
	;
7609 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7610 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7611 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
7612 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7613 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7614 op0 = gen_lowpart (TYPE_MODE (type), op0);
7615 else if (GET_CODE (op0) != MEM)
	{
	  /* If the operand is not a MEM, force it into memory.  Since we
	     are going to be changing the mode of the MEM, don't call
7619 force_const_mem for constants because we don't allow pool
7620 constants to change mode. */
7621 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
	  if (TREE_ADDRESSABLE (exp))
	    abort ();
7626 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
	    target
	      = assign_stack_temp_for_type
7629 (TYPE_MODE (inner_type),
7630 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
	  emit_move_insn (target, op0);
	  op0 = target;
	}
7636 /* At this point, OP0 is in the correct mode. If the output type is such
7637 that the operand is known to be aligned, indicate that it is.
	 Otherwise, we need only be concerned about alignment for non-BLKmode
	 results.  */
7640 if (GET_CODE (op0) == MEM)
	{
	  op0 = copy_rtx (op0);
7644 if (TYPE_ALIGN_OK (type))
7645 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7646 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7647 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
	    {
	      tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7650 HOST_WIDE_INT temp_size
7651 = MAX (int_size_in_bytes (inner_type),
7652 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7653 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7654 temp_size, 0, type);
7655 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
	      if (TREE_ADDRESSABLE (exp))
		abort ();
7660 if (GET_MODE (op0) == BLKmode)
7661 emit_block_move (new_with_op0_mode, op0,
7662 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7663 (modifier == EXPAND_STACK_PARM
7664 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
	      else
		emit_move_insn (new_with_op0_mode, op0);

	      op0 = new;
	    }

	  op0 = adjust_address (op0, TYPE_MODE (type), 0);
	}

      return op0;
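      /* Illustrative example (an addition): a bit-level reinterpretation
	 such as viewing a 32-bit float as a 32-bit integer can reach
	 this code.  When both modes are integral and fit in a word the
	 gen_lowpart path above suffices; otherwise (as in the
	 float-to-int view) the value is spilled to a stack temporary and
	 re-read in the new mode, since VIEW_CONVERT_EXPR changes only
	 how the bits are typed, never the bits themselves.  */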
    case PLUS_EXPR:
      this_optab = ! unsignedp && flag_trapv
7678 && (GET_MODE_CLASS (mode) == MODE_INT)
7679 ? addv_optab : add_optab;
7681 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7682 something else, make sure we add the register to the constant and
7683 then to the other thing. This case can occur during strength
7684 reduction and doing it this way will produce better code if the
7685 frame pointer or argument pointer is eliminated.
7687 fold-const.c will ensure that the constant is always in the inner
7688 PLUS_EXPR, so the only case we need to do anything about is if
7689 sp, ap, or fp is our second argument, in which case we must swap
7690 the innermost first argument and our second argument. */
7692 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7693 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7694 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7695 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7696 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7697 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7699 tree t = TREE_OPERAND (exp, 1);
7701 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7702 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7705 /* If the result is to be ptr_mode and we are adding an integer to
7706 something, we might be forming a constant. So try to use
7707 plus_constant. If it produces a sum and we can't accept it,
7708 use force_operand. This allows P = &ARR[const] to generate
7709 efficient code on machines where a SYMBOL_REF is not a valid
7712 If this is an EXPAND_SUM call, always return the sum. */
7713 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7714 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
	{
	  if (modifier == EXPAND_STACK_PARM)
	    target = 0;
7718 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7719 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7720 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
	    {
	      rtx constant_part;

	      op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
				 EXPAND_SUM);
7726 /* Use immed_double_const to ensure that the constant is
7727 truncated according to the mode of OP1, then sign extended
7728 to a HOST_WIDE_INT. Using the constant directly can result
7729 in non-canonical RTL in a 64x32 cross compile. */
	      constant_part
		= immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
				      (HOST_WIDE_INT) 0,
				      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7734 op1 = plus_constant (op1, INTVAL (constant_part));
7735 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		op1 = force_operand (op1, target);
	      return op1;
	    }
7740 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7741 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7742 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
	    {
	      rtx constant_part;

	      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7747 (modifier == EXPAND_INITIALIZER
7748 ? EXPAND_INITIALIZER : EXPAND_SUM));
	      if (! CONSTANT_P (op0))
		{
		  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
				     VOIDmode, modifier);
		  /* Return a PLUS if modifier says it's OK.  */
		  if (modifier == EXPAND_SUM
		      || modifier == EXPAND_INITIALIZER)
		    return simplify_gen_binary (PLUS, mode, op0, op1);
		  goto binop2;
		}
7759 /* Use immed_double_const to ensure that the constant is
7760 truncated according to the mode of OP1, then sign extended
7761 to a HOST_WIDE_INT. Using the constant directly can result
7762 in non-canonical RTL in a 64x32 cross compile. */
	      constant_part
		= immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
				      (HOST_WIDE_INT) 0,
				      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7767 op0 = plus_constant (op0, INTVAL (constant_part));
7768 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		op0 = force_operand (op0, target);
	      return op0;
	    }
	}
7774 /* No sense saving up arithmetic to be done
7775 if it's all in the wrong mode to form part of an address.
7776 And force_operand won't know whether to sign-extend or
7778 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7779 || mode != ptr_mode)
7781 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7782 subtarget, &op0, &op1, 0);
	  if (op0 == const0_rtx)
	    return op1;
	  if (op1 == const0_rtx)
	    return op0;
	  goto binop2;
	}
7790 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7791 subtarget, &op0, &op1, modifier);
7792 return simplify_gen_binary (PLUS, mode, op0, op1);
    case MINUS_EXPR:
      /* For initializers, we are allowed to return a MINUS of two
	 symbolic constants.  Here we handle all cases when both operands
	 are constant.  */
      /* Handle difference of two symbolic constants,
7799 for the sake of an initializer. */
7800 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7801 && really_constant_p (TREE_OPERAND (exp, 0))
7802 && really_constant_p (TREE_OPERAND (exp, 1)))
7804 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7805 NULL_RTX, &op0, &op1, modifier);
7807 /* If the last operand is a CONST_INT, use plus_constant of
7808 the negated constant. Else make the MINUS. */
7809 if (GET_CODE (op1) == CONST_INT)
7810 return plus_constant (op0, - INTVAL (op1));
	  return gen_rtx_MINUS (mode, op0, op1);
	}
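      /* Illustrative example (an addition; relies on an extension, since
	 ISO C does not bless such initializers): for something like

	     static long delta = (char *) &b - (char *) &a;

	 both operands are symbolic constants, and the code above returns
	 (minus (symbol_ref b) (symbol_ref a)) for the assembler or
	 linker to resolve, rather than emitting subtraction insns.  */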
7815 this_optab = ! unsignedp && flag_trapv
7816 && (GET_MODE_CLASS(mode) == MODE_INT)
7817 ? subv_optab : sub_optab;
7819 /* No sense saving up arithmetic to be done
7820 if it's all in the wrong mode to form part of an address.
7821 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
	  || mode != ptr_mode)
	goto binop;
7827 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7828 subtarget, &op0, &op1, modifier);
7830 /* Convert A - const to A + (-const). */
      if (GET_CODE (op1) == CONST_INT)
	{
	  op1 = negate_rtx (mode, op1);
	  return simplify_gen_binary (PLUS, mode, op0, op1);
	}

      goto binop2;
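      /* Illustrative note (an addition): canonicalizing x - 5 into
	 (plus (reg x) (const_int -5)) means later passes only ever see
	 one shape of "register plus constant", so combination and
	 address selection need not recognize a separate MINUS form for
	 constant operands.  */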
    case MULT_EXPR:
      /* If first operand is constant, swap them.
7841 Thus the following special case checks need only
7842 check the second operand. */
7843 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7845 tree t1 = TREE_OPERAND (exp, 0);
7846 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7847 TREE_OPERAND (exp, 1) = t1;
7850 /* Attempt to return something suitable for generating an
7851 indexed address, for machines that support that. */
7853 if (modifier == EXPAND_SUM && mode == ptr_mode
7854 && host_integerp (TREE_OPERAND (exp, 1), 0))
	{
	  tree exp1 = TREE_OPERAND (exp, 1);

	  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
			     EXPAND_SUM);
7861 if (GET_CODE (op0) != REG)
7862 op0 = force_operand (op0, NULL_RTX);
7863 if (GET_CODE (op0) != REG)
7864 op0 = copy_to_mode_reg (mode, op0);
7866 return gen_rtx_MULT (mode, op0,
7867 gen_int_mode (tree_low_cst (exp1, 0),
					       TYPE_MODE (TREE_TYPE (exp1))));
	}
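      /* Illustrative example (an addition): when the address arithmetic
	 of a[i] with 4-byte elements is expanded under EXPAND_SUM, the
	 i * 4 term can be returned as (mult (reg i) (const_int 4)),
	 letting the caller fold it into an indexed addressing mode
	 instead of forcing the product into a fresh pseudo.  */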
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
7874 /* Check for multiplying things that have been extended
7875 from a narrower type. If this machine supports multiplying
7876 in that narrower type with a result in the desired type,
7877 do it that way, and avoid the explicit type-conversion. */
7878 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7879 && TREE_CODE (type) == INTEGER_TYPE
7880 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7881 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7882 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7883 && int_fits_type_p (TREE_OPERAND (exp, 1),
7884 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7885 /* Don't use a widening multiply if a shift will do. */
7886 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7887 > HOST_BITS_PER_WIDE_INT)
		   || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
	      ||
	      (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
	       && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
		   ==
		   TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7894 /* If both operands are extended, they must either both
7895 be zero-extended or both be sign-extended. */
	       && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
		   ==
		   TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
	{
	  enum machine_mode innermode
7901 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7902 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7903 ? smul_widen_optab : umul_widen_optab);
7904 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7905 ? umul_widen_optab : smul_widen_optab);
	  if (mode == GET_MODE_WIDER_MODE (innermode))
	    {
	      if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
		{
7910 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7911 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7912 TREE_OPERAND (exp, 1),
7913 NULL_RTX, &op0, &op1, 0);
		  else
		    expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7916 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7917 NULL_RTX, &op0, &op1, 0);
7920 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7921 && innermode == word_mode)
7924 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7925 NULL_RTX, VOIDmode, 0);
7926 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7927 op1 = convert_modes (innermode, mode,
7928 expand_expr (TREE_OPERAND (exp, 1),
7929 NULL_RTX, VOIDmode, 0),
7932 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7933 NULL_RTX, VOIDmode, 0);
7934 temp = expand_binop (mode, other_optab, op0, op1, target,
7935 unsignedp, OPTAB_LIB_WIDEN);
7936 htem = expand_mult_highpart_adjust (innermode,
7937 gen_highpart (innermode, temp),
7939 gen_highpart (innermode, temp),
7941 emit_move_insn (gen_highpart (innermode, temp), htem);
7946 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7947 subtarget, &op0, &op1, 0);
7948 return expand_mult (mode, op0, op1, target, unsignedp);
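/* Sketch of the widening-multiply case above, assuming 16-bit short,
   32-bit int, and a target providing a pattern such as mulhisi3:

	int widen_mul (short a, short b) { return a * b; }

   After the usual promotions both operands are NOP_EXPRs extending from
   the narrower HImode, so the multiplication can be done by the widening
   optab with an SImode result, avoiding two explicit extensions and a
   full SImode multiply.  */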
7950 case TRUNC_DIV_EXPR:
7951 case FLOOR_DIV_EXPR:
7953 case ROUND_DIV_EXPR:
7954 case EXACT_DIV_EXPR:
7955 if (modifier == EXPAND_STACK_PARM)
7957 /* Possible optimization: compute the dividend with EXPAND_SUM;
7958 then, if the divisor is constant, we can optimize the case
7959 where some terms of the dividend have coefficients divisible by it. */
7960 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7961 subtarget, &op0, &op1, 0);
7962 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7965 /* Emit a/b as a*(1/b). Later we may manage to CSE the reciprocal, saving an
7966 expensive divide. If not, combine will rebuild the original computation. */
7968 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7969 && TREE_CODE (type) == REAL_TYPE
7970 && !real_onep (TREE_OPERAND (exp, 0)))
7971 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7972 build (RDIV_EXPR, type,
7973 build_real (type, dconst1),
7974 TREE_OPERAND (exp, 1))),
7975 target, tmode, modifier);
7976 this_optab = sdiv_optab;
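/* Hedged sketch of the -funsafe-math-optimizations rewrite above:

	void scale (double *x, int n, double d)
	{
	  int i;
	  for (i = 0; i < n; i++)
	    x[i] = x[i] / d;
	}

   Each division is expanded as x[i] * (1.0 / d); if CSE or loop
   optimization can then share the single reciprocal, n divisions become
   one division plus n multiplications.  When the reciprocal is used only
   once, combine is expected to rebuild the plain division.  */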
7979 case TRUNC_MOD_EXPR:
7980 case FLOOR_MOD_EXPR:
7982 case ROUND_MOD_EXPR:
7983 if (modifier == EXPAND_STACK_PARM)
7985 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7986 subtarget, &op0, &op1, 0);
7987 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7989 case FIX_ROUND_EXPR:
7990 case FIX_FLOOR_EXPR:
7992 abort (); /* Not used for C. */
7994 case FIX_TRUNC_EXPR:
7995 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7996 if (target == 0 || modifier == EXPAND_STACK_PARM)
7997 target = gen_reg_rtx (mode);
7998 expand_fix (target, op0, unsignedp);
8002 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8003 if (target == 0 || modifier == EXPAND_STACK_PARM)
8004 target = gen_reg_rtx (mode);
8005 /* expand_float can't figure out what to do if FROM has VOIDmode.
8006 So give it the correct mode. With -O, cse will optimize this. */
8007 if (GET_MODE (op0) == VOIDmode)
8008 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8010 expand_float (target, op0,
8011 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8015 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8016 if (modifier == EXPAND_STACK_PARM)
8018 temp = expand_unop (mode,
8019 ! unsignedp && flag_trapv
8020 && (GET_MODE_CLASS(mode) == MODE_INT)
8021 ? negv_optab : neg_optab, op0, target, 0);
8027 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8028 if (modifier == EXPAND_STACK_PARM)
8031 /* ABS_EXPR is not valid for complex arguments. */
8032 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
8033 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
8036 /* Unsigned abs is simply the operand. Testing here means we don't
8037 risk generating incorrect code below. */
8038 if (TREE_UNSIGNED (type))
8041 return expand_abs (mode, op0, target, unsignedp,
8042 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8046 target = original_target;
8048 || modifier == EXPAND_STACK_PARM
8049 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
8050 || GET_MODE (target) != mode
8051 || (GET_CODE (target) == REG
8052 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8053 target = gen_reg_rtx (mode);
8054 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8055 target, &op0, &op1, 0);
8057 /* First try to do it with a special MIN or MAX instruction.
8058 If that does not win, use a conditional jump to select the proper value. */
8060 this_optab = (TREE_UNSIGNED (type)
8061 ? (code == MIN_EXPR ? umin_optab : umax_optab)
8062 : (code == MIN_EXPR ? smin_optab : smax_optab));
8064 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8069 /* At this point, a MEM target is no longer useful; we will get better code without it. */
8072 if (GET_CODE (target) == MEM)
8073 target = gen_reg_rtx (mode);
8075 /* If op1 was placed in target, swap op0 and op1. */
8076 if (target != op0 && target == op1)
8084 emit_move_insn (target, op0);
8086 op0 = gen_label_rtx ();
8088 /* If this mode is an integer too wide to compare properly,
8089 compare word by word. Rely on cse to optimize constant cases. */
8090 if (GET_MODE_CLASS (mode) == MODE_INT
8091 && ! can_compare_p (GE, mode, ccp_jump))
8093 if (code == MAX_EXPR)
8094 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8095 target, op1, NULL_RTX, op0);
8097 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8098 op1, target, NULL_RTX, op0);
8102 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
8103 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
8104 unsignedp, mode, NULL_RTX, NULL_RTX,
8107 emit_move_insn (target, op1);
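/* The conditional-jump fallback just emitted corresponds roughly to this
   source form (illustrative, shown for MAX_EXPR):

	target = op0;
	if (target >= op1)
	  goto done;
	target = op1;
     done:

   op0 is stored first and the branch skips the store of op1 whenever op0
   is already the maximum.  */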
8112 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8113 if (modifier == EXPAND_STACK_PARM)
8115 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8120 /* ??? Can optimize bitwise operations with one arg constant.
8121 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8122 and (a bitwise1 b) bitwise2 b (etc)
8123 but that is probably not worthwhile. */
8125 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8126 boolean values when we want in all cases to compute both of them. In
8127 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8128 as actual zero-or-1 values and then bitwise anding. In cases where
8129 there cannot be any side effects, better code would be made by
8130 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8131 how to recognize those cases. */
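/* Illustrative contrast for the comment above:

	int f (int a, int b) { return a && b; }
	int g (int a, int b) { return (a != 0) & (b != 0); }

   TRUTH_AND_EXPR is expanded like g: both operands are computed as
   0-or-1 values and combined with a bitwise AND, with no branches,
   whereas TRUTH_ANDIF_EXPR (f) must short-circuit and skip evaluating
   b whenever a is zero.  */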
8133 case TRUTH_AND_EXPR:
8135 this_optab = and_optab;
8140 this_optab = ior_optab;
8143 case TRUTH_XOR_EXPR:
8145 this_optab = xor_optab;
8152 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8154 if (modifier == EXPAND_STACK_PARM)
8156 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8157 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8160 /* Could determine the answer when only additive constants differ. Also,
8161 the addition of one can be handled by changing the condition. */
8168 case UNORDERED_EXPR:
8175 temp = do_store_flag (exp,
8176 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8177 tmode != VOIDmode ? tmode : mode, 0);
8181 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8182 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8184 && GET_CODE (original_target) == REG
8185 && (GET_MODE (original_target)
8186 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8188 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8191 /* If temp is constant, we can just compute the result. */
8192 if (GET_CODE (temp) == CONST_INT)
8194 if (INTVAL (temp) != 0)
8195 emit_move_insn (target, const1_rtx);
8197 emit_move_insn (target, const0_rtx);
8202 if (temp != original_target)
8204 enum machine_mode mode1 = GET_MODE (temp);
8205 if (mode1 == VOIDmode)
8206 mode1 = tmode != VOIDmode ? tmode : mode;
8208 temp = copy_to_mode_reg (mode1, temp);
8211 op1 = gen_label_rtx ();
8212 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8213 GET_MODE (temp), unsignedp, op1);
8214 emit_move_insn (temp, const1_rtx);
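/* The special case above amounts to this sequence, in source form
   (illustrative; temp names the pseudo used above):

	temp = foo;
	if (temp == 0)
	  goto done;
	temp = 1;
     done:

   so any nonzero foo is normalized to 1 without needing a dedicated
   store-flag instruction.  */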
8219 /* If no set-flag instruction, must generate a conditional
8220 store into a temporary variable. Drop through
8221 and handle this like && and ||. */
8223 case TRUTH_ANDIF_EXPR:
8224 case TRUTH_ORIF_EXPR:
8227 || modifier == EXPAND_STACK_PARM
8228 || ! safe_from_p (target, exp, 1)
8229 /* Make sure we don't have a hard reg (such as function's return
8230 value) live across basic blocks, if not optimizing. */
8231 || (!optimize && GET_CODE (target) == REG
8232 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8233 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8236 emit_clr_insn (target);
8238 op1 = gen_label_rtx ();
8239 jumpifnot (exp, op1);
8242 emit_0_to_1_insn (target);
8245 return ignore ? const0_rtx : target;
8247 case TRUTH_NOT_EXPR:
8248 if (modifier == EXPAND_STACK_PARM)
8250 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8251 /* The parser is careful to generate TRUTH_NOT_EXPR
8252 only with operands that are always zero or one. */
8253 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8254 target, 1, OPTAB_LIB_WIDEN);
8260 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8262 return expand_expr (TREE_OPERAND (exp, 1),
8263 (ignore ? const0_rtx : target),
8264 VOIDmode, modifier);
8267 /* If we would have a "singleton" (see below) were it not for a
8268 conversion in each arm, bring that conversion back out. */
8269 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8270 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8271 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8272 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8274 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8275 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8277 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8278 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8279 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8280 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8281 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8282 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8283 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8284 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8285 return expand_expr (build1 (NOP_EXPR, type,
8286 build (COND_EXPR, TREE_TYPE (iftrue),
8287 TREE_OPERAND (exp, 0),
8289 target, tmode, modifier);
8293 /* Note that COND_EXPRs whose type is a structure or union
8294 are required to be constructed to contain assignments of
8295 a temporary variable, so that we can evaluate them here
8296 for side effect only. If type is void, we must do likewise. */
8298 /* If an arm of the branch requires a cleanup,
8299 only that cleanup is performed. */
8302 tree binary_op = 0, unary_op = 0;
8304 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8305 convert it to our mode, if necessary. */
8306 if (integer_onep (TREE_OPERAND (exp, 1))
8307 && integer_zerop (TREE_OPERAND (exp, 2))
8308 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8312 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8317 if (modifier == EXPAND_STACK_PARM)
8319 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8320 if (GET_MODE (op0) == mode)
8324 target = gen_reg_rtx (mode);
8325 convert_move (target, op0, unsignedp);
8329 /* Check for X ? A + B : A. If we have this, we can copy A to the
8330 output and conditionally add B. Similarly for unary operations.
8331 Don't do this if X has side-effects because those side effects
8332 might affect A or B and the "?" operation is a sequence point in
8333 ANSI. (operand_equal_p tests for side effects.) */
8335 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8336 && operand_equal_p (TREE_OPERAND (exp, 2),
8337 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8338 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8339 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8340 && operand_equal_p (TREE_OPERAND (exp, 1),
8341 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8342 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8343 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8344 && operand_equal_p (TREE_OPERAND (exp, 2),
8345 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8346 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8347 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8348 && operand_equal_p (TREE_OPERAND (exp, 1),
8349 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8350 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8352 /* If we are not to produce a result, we have no target. Otherwise,
8353 if a target was specified use it; it will not be used as an
8354 intermediate target unless it is safe. If no target, use a temporary. */
8359 else if (modifier == EXPAND_STACK_PARM)
8360 temp = assign_temp (type, 0, 0, 1);
8361 else if (original_target
8362 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8363 || (singleton && GET_CODE (original_target) == REG
8364 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8365 && original_target == var_rtx (singleton)))
8366 && GET_MODE (original_target) == mode
8367 #ifdef HAVE_conditional_move
8368 && (! can_conditionally_move_p (mode)
8369 || GET_CODE (original_target) == REG
8370 || TREE_ADDRESSABLE (type))
8372 && (GET_CODE (original_target) != MEM
8373 || TREE_ADDRESSABLE (type)))
8374 temp = original_target;
8375 else if (TREE_ADDRESSABLE (type))
8378 temp = assign_temp (type, 0, 0, 1);
8380 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8381 do the test of X as a store-flag operation, do this as
8382 A + ((X != 0) << log C). Similarly for other simple binary
8383 operators. Only do for C == 1 if BRANCH_COST is low. */
8384 if (temp && singleton && binary_op
8385 && (TREE_CODE (binary_op) == PLUS_EXPR
8386 || TREE_CODE (binary_op) == MINUS_EXPR
8387 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8388 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8389 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8390 : integer_onep (TREE_OPERAND (binary_op, 1)))
8391 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8395 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8396 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8397 ? addv_optab : add_optab)
8398 : TREE_CODE (binary_op) == MINUS_EXPR
8399 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8400 ? subv_optab : sub_optab)
8401 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8404 /* If we had X ? A : A + 1, do this as A + (X == 0). */
8405 if (singleton == TREE_OPERAND (exp, 1))
8406 cond = invert_truthvalue (TREE_OPERAND (exp, 0));
8408 cond = TREE_OPERAND (exp, 0);
8410 result = do_store_flag (cond, (safe_from_p (temp, singleton, 1)
8412 mode, BRANCH_COST <= 1);
8414 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8415 result = expand_shift (LSHIFT_EXPR, mode, result,
8416 build_int_2 (tree_log2
8420 (safe_from_p (temp, singleton, 1)
8421 ? temp : NULL_RTX), 0);
8425 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8426 return expand_binop (mode, boptab, op1, result, temp,
8427 unsignedp, OPTAB_LIB_WIDEN);
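/* Worked example of the store-flag transformation above: with a
   sufficiently high BRANCH_COST,

	y = x ? a + 4 : a;

   is compiled as

	y = a + ((x != 0) << 2);

   do_store_flag yields the 0-or-1 value of the condition, expand_shift
   scales it by log2 of the constant, and expand_binop performs the final
   addition, so no branch is emitted.  */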
8431 do_pending_stack_adjust ();
8433 op0 = gen_label_rtx ();
8435 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8439 /* If the target conflicts with the other operand of the
8440 binary op, we can't use it. Also, we can't use the target
8441 if it is a hard register, because evaluating the condition
8442 might clobber it. */
8444 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8445 || (GET_CODE (temp) == REG
8446 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8447 temp = gen_reg_rtx (mode);
8448 store_expr (singleton, temp,
8449 modifier == EXPAND_STACK_PARM ? 2 : 0);
8452 expand_expr (singleton,
8453 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8454 if (singleton == TREE_OPERAND (exp, 1))
8455 jumpif (TREE_OPERAND (exp, 0), op0);
8457 jumpifnot (TREE_OPERAND (exp, 0), op0);
8459 start_cleanup_deferral ();
8460 if (binary_op && temp == 0)
8461 /* Just touch the other operand. */
8462 expand_expr (TREE_OPERAND (binary_op, 1),
8463 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8465 store_expr (build (TREE_CODE (binary_op), type,
8466 make_tree (type, temp),
8467 TREE_OPERAND (binary_op, 1)),
8468 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8470 store_expr (build1 (TREE_CODE (unary_op), type,
8471 make_tree (type, temp)),
8472 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8475 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8476 comparison operator. If we have one of these cases, set the
8477 output to A, branch on A (cse will merge these two references),
8478 then set the output to FOO. */
8480 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8481 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8482 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8483 TREE_OPERAND (exp, 1), 0)
8484 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8485 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8486 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8488 if (GET_CODE (temp) == REG
8489 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8490 temp = gen_reg_rtx (mode);
8491 store_expr (TREE_OPERAND (exp, 1), temp,
8492 modifier == EXPAND_STACK_PARM ? 2 : 0);
8493 jumpif (TREE_OPERAND (exp, 0), op0);
8495 start_cleanup_deferral ();
8496 if (TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8497 store_expr (TREE_OPERAND (exp, 2), temp,
8498 modifier == EXPAND_STACK_PARM ? 2 : 0);
8500 expand_expr (TREE_OPERAND (exp, 2),
8501 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8505 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8506 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8507 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8508 TREE_OPERAND (exp, 2), 0)
8509 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8510 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8511 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8513 if (GET_CODE (temp) == REG
8514 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8515 temp = gen_reg_rtx (mode);
8516 store_expr (TREE_OPERAND (exp, 2), temp,
8517 modifier == EXPAND_STACK_PARM ? 2 : 0);
8518 jumpifnot (TREE_OPERAND (exp, 0), op0);
8520 start_cleanup_deferral ();
8521 if (TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8522 store_expr (TREE_OPERAND (exp, 1), temp,
8523 modifier == EXPAND_STACK_PARM ? 2 : 0);
8525 expand_expr (TREE_OPERAND (exp, 1),
8526 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8531 op1 = gen_label_rtx ();
8532 jumpifnot (TREE_OPERAND (exp, 0), op0);
8534 start_cleanup_deferral ();
8536 /* One branch of the cond can be void, if it never returns. For
8537 example A ? throw : E. */
8539 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8540 store_expr (TREE_OPERAND (exp, 1), temp,
8541 modifier == EXPAND_STACK_PARM ? 2 : 0);
8543 expand_expr (TREE_OPERAND (exp, 1),
8544 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8545 end_cleanup_deferral ();
8547 emit_jump_insn (gen_jump (op1));
8550 start_cleanup_deferral ();
8552 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8553 store_expr (TREE_OPERAND (exp, 2), temp,
8554 modifier == EXPAND_STACK_PARM ? 2 : 0);
8556 expand_expr (TREE_OPERAND (exp, 2),
8557 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8560 end_cleanup_deferral ();
8571 /* Something needs to be initialized, but we didn't know
8572 where that thing was when building the tree. For example,
8573 it could be the return value of a function, or a parameter
8574 to a function which is laid out on the stack, or a temporary
8575 variable which must be passed by reference.
8577 We guarantee that the expression will either be constructed
8578 or copied into our original target. */
8580 tree slot = TREE_OPERAND (exp, 0);
8581 tree cleanups = NULL_TREE;
8584 if (TREE_CODE (slot) != VAR_DECL)
8588 target = original_target;
8590 /* Set this here so that if we get a target that refers to a
8591 register variable that's already been used, put_reg_into_stack
8592 knows that it should fix up those uses. */
8593 TREE_USED (slot) = 1;
8597 if (DECL_RTL_SET_P (slot))
8599 target = DECL_RTL (slot);
8600 /* If we have already expanded the slot, don't do it again. */
8602 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8607 target = assign_temp (type, 2, 0, 1);
8608 /* All temp slots at this level must not conflict. */
8609 preserve_temp_slots (target);
8610 SET_DECL_RTL (slot, target);
8611 if (TREE_ADDRESSABLE (slot))
8612 put_var_into_stack (slot, /*rescan=*/false);
8614 /* Since SLOT is not known to the called function
8615 to belong to its stack frame, we must build an explicit
8616 cleanup. This case occurs when we must build up a reference
8617 to pass the reference as an argument. In this case,
8618 it is very likely that such a reference need not be built here. */
8621 if (TREE_OPERAND (exp, 2) == 0)
8622 TREE_OPERAND (exp, 2)
8623 = (*lang_hooks.maybe_build_cleanup) (slot);
8624 cleanups = TREE_OPERAND (exp, 2);
8629 /* This case does occur, when expanding a parameter which
8630 needs to be constructed on the stack. The target
8631 is the actual stack address that we want to initialize.
8632 The function we call will perform the cleanup in this case. */
8634 /* If we have already assigned it space, use that space,
8635 not the target that we were passed, as our target
8636 parameter is only a hint. */
8637 if (DECL_RTL_SET_P (slot))
8639 target = DECL_RTL (slot);
8640 /* If we have already expanded the slot, don't do it again. */
8642 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8647 SET_DECL_RTL (slot, target);
8648 /* If we must have an addressable slot, then make sure that
8649 the RTL that we just stored in slot is OK. */
8650 if (TREE_ADDRESSABLE (slot))
8651 put_var_into_stack (slot, /*rescan=*/true);
8655 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8656 /* Mark it as expanded. */
8657 TREE_OPERAND (exp, 1) = NULL_TREE;
8659 store_expr (exp1, target, modifier == EXPAND_STACK_PARM ? 2 : 0);
8661 expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));
8668 tree lhs = TREE_OPERAND (exp, 0);
8669 tree rhs = TREE_OPERAND (exp, 1);
8671 temp = expand_assignment (lhs, rhs, ! ignore);
8677 /* If lhs is complex, expand calls in rhs before computing it.
8678 That's so we don't compute a pointer and save it over a
8679 call. If lhs is simple, compute it first so we can give it
8680 as a target if the rhs is just a call. This avoids an
8681 extra temp and copy and that prevents a partial-subsumption
8682 which makes bad code. Actually we could treat
8683 component_ref's of vars like vars. */
8685 tree lhs = TREE_OPERAND (exp, 0);
8686 tree rhs = TREE_OPERAND (exp, 1);
8690 /* Check for |= or &= of a bitfield of size one into another bitfield
8691 of size 1. In this case, (unless we need the result of the
8692 assignment) we can do this more efficiently with a
8693 test followed by an assignment, if necessary.
8695 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8696 things change so we do, this code should be enhanced to support it. */
8699 && TREE_CODE (lhs) == COMPONENT_REF
8700 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8701 || TREE_CODE (rhs) == BIT_AND_EXPR)
8702 && TREE_OPERAND (rhs, 0) == lhs
8703 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8704 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8705 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8707 rtx label = gen_label_rtx ();
8709 do_jump (TREE_OPERAND (rhs, 1),
8710 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8711 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8712 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8713 (TREE_CODE (rhs) == BIT_IOR_EXPR
8715 : integer_zero_node)),
8717 do_pending_stack_adjust ();
8722 temp = expand_assignment (lhs, rhs, ! ignore);
8728 if (!TREE_OPERAND (exp, 0))
8729 expand_null_return ();
8731 expand_return (TREE_OPERAND (exp, 0));
8734 case PREINCREMENT_EXPR:
8735 case PREDECREMENT_EXPR:
8736 return expand_increment (exp, 0, ignore);
8738 case POSTINCREMENT_EXPR:
8739 case POSTDECREMENT_EXPR:
8740 /* Faster to treat as pre-increment if result is not used. */
8741 return expand_increment (exp, ! ignore, ignore);
8744 if (modifier == EXPAND_STACK_PARM)
8746 /* Are we taking the address of a nested function? */
8747 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8748 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8749 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8750 && ! TREE_STATIC (exp))
8752 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8753 op0 = force_operand (op0, target);
8755 /* If we are taking the address of something erroneous, just return a zero. */
8757 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8759 /* If we are taking the address of a constant and are at the
8760 top level, we have to use output_constant_def since we can't
8761 call force_const_mem at top level. */
8763 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8764 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
8766 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
8769 /* We make sure to pass const0_rtx down if we came in with
8770 ignore set, to avoid doing the cleanups twice for something. */
8771 op0 = expand_expr (TREE_OPERAND (exp, 0),
8772 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8773 (modifier == EXPAND_INITIALIZER
8774 ? modifier : EXPAND_CONST_ADDRESS));
8776 /* If we are going to ignore the result, OP0 will have been set
8777 to const0_rtx, so just return it. Don't get confused and
8778 think we are taking the address of the constant. */
8782 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8783 clever and return a REG when given a MEM. */
8784 op0 = protect_from_queue (op0, 1);
8786 /* We would like the object in memory. If it is a constant, we can
8787 have it be statically allocated into memory. For a non-constant,
8788 we need to allocate some memory and store the value into it. */
8790 if (CONSTANT_P (op0))
8791 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8793 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8794 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8795 || GET_CODE (op0) == PARALLEL || GET_CODE (op0) == LO_SUM)
8797 /* If the operand is a SAVE_EXPR, we can deal with this by
8798 forcing the SAVE_EXPR into memory. */
8799 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
8801 put_var_into_stack (TREE_OPERAND (exp, 0),
8803 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
8807 /* If this object is in a register, it can't be BLKmode. */
8808 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8809 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8811 if (GET_CODE (op0) == PARALLEL)
8812 /* Handle calls that pass values in multiple
8813 non-contiguous locations. The Irix 6 ABI has examples of this. */
8815 emit_group_store (memloc, op0, inner_type,
8816 int_size_in_bytes (inner_type));
8818 emit_move_insn (memloc, op0);
8824 if (GET_CODE (op0) != MEM)
8827 mark_temp_addr_taken (op0);
8828 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8830 op0 = XEXP (op0, 0);
8831 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
8832 op0 = convert_memory_address (ptr_mode, op0);
8836 /* If OP0 is not aligned at least as much as the type requires, we
8837 need to make a temporary, copy OP0 to it, and take the address of
8838 the temporary. We want to use the alignment of the type, not of
8839 the operand. Note that this is incorrect for FUNCTION_TYPE, but
8840 the test for BLKmode means that can't happen. The test for
8841 BLKmode is because we never make mis-aligned MEMs with non-BLKmode.
8844 We don't need to do this at all if the machine doesn't have
8845 strict alignment. */
8846 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
8847 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
8849 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
8851 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8854 if (TYPE_ALIGN_OK (inner_type))
8857 if (TREE_ADDRESSABLE (inner_type))
8859 /* We can't make a bitwise copy of this object, so fail. */
8860 error ("cannot take the address of an unaligned member");
8864 new = assign_stack_temp_for_type
8865 (TYPE_MODE (inner_type),
8866 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
8867 : int_size_in_bytes (inner_type),
8868 1, build_qualified_type (inner_type,
8869 (TYPE_QUALS (inner_type)
8870 | TYPE_QUAL_CONST)));
8872 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
8873 (modifier == EXPAND_STACK_PARM
8874 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
8879 op0 = force_operand (XEXP (op0, 0), target);
8883 && GET_CODE (op0) != REG
8884 && modifier != EXPAND_CONST_ADDRESS
8885 && modifier != EXPAND_INITIALIZER
8886 && modifier != EXPAND_SUM)
8887 op0 = force_reg (Pmode, op0);
8889 if (GET_CODE (op0) == REG
8890 && ! REG_USERVAR_P (op0))
8891 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8893 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
8894 op0 = convert_memory_address (ptr_mode, op0);
8898 case ENTRY_VALUE_EXPR:
8901 /* COMPLEX type for Extended Pascal & Fortran */
8904 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8907 /* Get the rtx code of the operands. */
8908 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8909 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8912 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8916 /* Move the real (op0) and imaginary (op1) parts to their location. */
8917 emit_move_insn (gen_realpart (mode, target), op0);
8918 emit_move_insn (gen_imagpart (mode, target), op1);
8920 insns = get_insns ();
8923 /* Complex construction should appear as a single unit. */
8924 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8925 each with a separate pseudo as destination.
8926 It's not correct for flow to treat them as a unit. */
8927 if (GET_CODE (target) != CONCAT)
8928 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8936 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8937 return gen_realpart (mode, op0);
8940 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8941 return gen_imagpart (mode, op0);
8945 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8949 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8952 target = gen_reg_rtx (mode);
8956 /* Store the realpart and the negated imagpart to target. */
8957 emit_move_insn (gen_realpart (partmode, target),
8958 gen_realpart (partmode, op0));
8960 imag_t = gen_imagpart (partmode, target);
8961 temp = expand_unop (partmode,
8962 ! unsignedp && flag_trapv
8963 && (GET_MODE_CLASS(partmode) == MODE_INT)
8964 ? negv_optab : neg_optab,
8965 gen_imagpart (partmode, op0), imag_t, 0);
8967 emit_move_insn (imag_t, temp);
8969 insns = get_insns ();
8972 /* Conjugate should appear as a single unit.
8973 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8974 each with a separate pseudo as destination.
8975 It's not correct for flow to treat them as a unit. */
8976 if (GET_CODE (target) != CONCAT)
8977 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8984 case TRY_CATCH_EXPR:
8986 tree handler = TREE_OPERAND (exp, 1);
8988 expand_eh_region_start ();
8990 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8992 expand_eh_region_end_cleanup (handler);
8997 case TRY_FINALLY_EXPR:
8999 tree try_block = TREE_OPERAND (exp, 0);
9000 tree finally_block = TREE_OPERAND (exp, 1);
9002 if (!optimize || unsafe_for_reeval (finally_block) > 1)
9004 /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
9005 is not sufficient, so we cannot expand the block twice.
9006 So we play games with GOTO_SUBROUTINE_EXPR to let us
9007 expand the thing only once. */
9008 /* When not optimizing, we go ahead with this form since
9009 (1) user breakpoints operate more predictably without
9010 code duplication, and
9011 (2) we're not running any of the global optimizers
9012 that would explode in time/space with the highly
9013 connected CFG created by the indirect branching. */
9015 rtx finally_label = gen_label_rtx ();
9016 rtx done_label = gen_label_rtx ();
9017 rtx return_link = gen_reg_rtx (Pmode);
9018 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
9019 (tree) finally_label, (tree) return_link);
9020 TREE_SIDE_EFFECTS (cleanup) = 1;
9022 /* Start a new binding layer that will keep track of all cleanup
9023 actions to be performed. */
9024 expand_start_bindings (2);
9025 target_temp_slot_level = temp_slot_level;
9027 expand_decl_cleanup (NULL_TREE, cleanup);
9028 op0 = expand_expr (try_block, target, tmode, modifier);
9030 preserve_temp_slots (op0);
9031 expand_end_bindings (NULL_TREE, 0, 0);
9032 emit_jump (done_label);
9033 emit_label (finally_label);
9034 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
9035 emit_indirect_jump (return_link);
9036 emit_label (done_label);
9040 expand_start_bindings (2);
9041 target_temp_slot_level = temp_slot_level;
9043 expand_decl_cleanup (NULL_TREE, finally_block);
9044 op0 = expand_expr (try_block, target, tmode, modifier);
9046 preserve_temp_slots (op0);
9047 expand_end_bindings (NULL_TREE, 0, 0);
9053 case GOTO_SUBROUTINE_EXPR:
9055 rtx subr = (rtx) TREE_OPERAND (exp, 0);
9056 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
9057 rtx return_address = gen_label_rtx ();
9058 emit_move_insn (return_link,
9059 gen_rtx_LABEL_REF (Pmode, return_address));
9061 emit_label (return_address);
9066 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
9069 return get_exception_pointer (cfun);
9072 /* Function descriptors are not valid except as
9073 initialization constants, and should not be expanded. */
9077 return (*lang_hooks.expand_expr) (exp, original_target, tmode, modifier);
9080 /* Here to do an ordinary binary operator, generating an instruction
9081 from the optab already placed in `this_optab'. */
9083 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9084 subtarget, &op0, &op1, 0);
9086 if (modifier == EXPAND_STACK_PARM)
9088 temp = expand_binop (mode, this_optab, op0, op1, target,
9089 unsignedp, OPTAB_LIB_WIDEN);
9095 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9096 when applied to the address of EXP produces an address known to be
9097 aligned more than BIGGEST_ALIGNMENT. */
9100 is_aligning_offset (tree offset, tree exp)
9102 /* Strip off any conversions and WITH_RECORD_EXPR nodes. */
9103 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9104 || TREE_CODE (offset) == NOP_EXPR
9105 || TREE_CODE (offset) == CONVERT_EXPR
9106 || TREE_CODE (offset) == WITH_RECORD_EXPR)
9107 offset = TREE_OPERAND (offset, 0);
9109 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9110 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9111 if (TREE_CODE (offset) != BIT_AND_EXPR
9112 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9113 || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT) <= 0
9114 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9117 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9118 It must be NEGATE_EXPR. Then strip any more conversions. */
9119 offset = TREE_OPERAND (offset, 0);
9120 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9121 || TREE_CODE (offset) == NOP_EXPR
9122 || TREE_CODE (offset) == CONVERT_EXPR)
9123 offset = TREE_OPERAND (offset, 0);
9125 if (TREE_CODE (offset) != NEGATE_EXPR)
9128 offset = TREE_OPERAND (offset, 0);
9129 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9130 || TREE_CODE (offset) == NOP_EXPR
9131 || TREE_CODE (offset) == CONVERT_EXPR)
9132 offset = TREE_OPERAND (offset, 0);
9134 /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
9135 whose type is the same as EXP. */
9136 return (TREE_CODE (offset) == ADDR_EXPR
9137 && (TREE_OPERAND (offset, 0) == exp
9138 || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
9139 && (TREE_TYPE (TREE_OPERAND (offset, 0))
9140 == TREE_TYPE (exp)))));
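/* The tree shape matched above corresponds roughly to an offset written
   at the source level as (illustrative; ALIGN is a hypothetical power of
   2 larger than BIGGEST_ALIGNMENT):

	offset = (-(size_t) &exp) & (ALIGN - 1);

   Adding such an offset to the address of EXP rounds it up to the next
   ALIGN boundary, which is why the resulting address is known to be more
   than BIGGEST_ALIGNMENT-aligned.  */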
9143 /* Return the tree node if an ARG corresponds to a string constant or zero
9144 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9145 in bytes within the string that ARG is accessing. The type of the
9146 offset will be `sizetype'. */
9149 string_constant (tree arg, tree *ptr_offset)
9153 if (TREE_CODE (arg) == ADDR_EXPR
9154 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9156 *ptr_offset = size_zero_node;
9157 return TREE_OPERAND (arg, 0);
9159 else if (TREE_CODE (arg) == PLUS_EXPR)
9161 tree arg0 = TREE_OPERAND (arg, 0);
9162 tree arg1 = TREE_OPERAND (arg, 1);
9167 if (TREE_CODE (arg0) == ADDR_EXPR
9168 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9170 *ptr_offset = convert (sizetype, arg1);
9171 return TREE_OPERAND (arg0, 0);
9173 else if (TREE_CODE (arg1) == ADDR_EXPR
9174 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9176 *ptr_offset = convert (sizetype, arg0);
9177 return TREE_OPERAND (arg1, 0);
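/* Usage sketch (hypothetical caller): for the tree form of "hello" + 2,
   string_constant returns the STRING_CST "hello" and sets *ptr_offset to
   (sizetype) 2, which lets builtin folders evaluate e.g.
   strlen ("hello" + 2) at compile time.  */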
9184 /* Expand code for a post- or pre- increment or decrement
9185 and return the RTX for the result.
9186 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9189 expand_increment (tree exp, int post, int ignore)
9193 tree incremented = TREE_OPERAND (exp, 0);
9194 optab this_optab = add_optab;
9196 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9197 int op0_is_copy = 0;
9198 int single_insn = 0;
9199 /* 1 means we can't store into OP0 directly,
9200 because it is a subreg narrower than a word,
9201 and we don't dare clobber the rest of the word. */
9204 /* Stabilize any component ref that might need to be
9205 evaluated more than once below. */
9207 || TREE_CODE (incremented) == BIT_FIELD_REF
9208 || (TREE_CODE (incremented) == COMPONENT_REF
9209 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9210 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9211 incremented = stabilize_reference (incremented);
9212 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9213 ones into save exprs so that they don't accidentally get evaluated
9214 more than once by the code below. */
9215 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9216 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9217 incremented = save_expr (incremented);
9219 /* Compute the operands as RTX.
9220 Note whether OP0 is the actual lvalue or a copy of it:
9221 I believe it is a copy iff it is a register or subreg
9222 and insns were generated in computing it. */
9224 temp = get_last_insn ();
9225 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9227 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9228 in place but instead must do sign- or zero-extension during assignment,
9229 so we copy it into a new register and let the code below use it as a copy.
9232 Note that we can safely modify this SUBREG since it is known not to be
9233 shared (it was made by the expand_expr call above). */
9235 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9238 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9242 else if (GET_CODE (op0) == SUBREG
9243 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9245 /* We cannot increment this SUBREG in place. If we are
9246 post-incrementing, get a copy of the old value. Otherwise,
9247 just mark that we cannot increment in place. */
9249 op0 = copy_to_reg (op0);
9254 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9255 && temp != get_last_insn ());
9256 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9258 /* Decide whether incrementing or decrementing. */
9259 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9260 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9261 this_optab = sub_optab;
9263 /* Convert decrement by a constant into a negative increment. */
9264 if (this_optab == sub_optab
9265 && GET_CODE (op1) == CONST_INT)
9267 op1 = GEN_INT (-INTVAL (op1));
9268 this_optab = add_optab;
9271 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9272 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9274 /* For a preincrement, see if we can do this with a single instruction. */
9277 icode = (int) this_optab->handlers[(int) mode].insn_code;
9278 if (icode != (int) CODE_FOR_nothing
9279 /* Make sure that OP0 is valid for operands 0 and 1
9280 of the insn we want to queue. */
9281 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9282 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9283 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9287 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9288 then we cannot just increment OP0. We must therefore contrive to
9289 increment the original value. Then, for postincrement, we can return
9290 OP0 since it is a copy of the old value. For preincrement, expand here
9291 unless we can do it with a single insn.
9293 Likewise if storing directly into OP0 would clobber high bits
9294 we need to preserve (bad_subreg). */
9295 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9297 /* This is the easiest way to increment the value wherever it is.
9298 Problems with multiple evaluation of INCREMENTED are prevented
9299 because either (1) it is a component_ref or preincrement,
9300 in which case it was stabilized above, or (2) it is an array_ref
9301 with constant index in an array in a register, which is
9302 safe to reevaluate. */
9303 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9304 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9305 ? MINUS_EXPR : PLUS_EXPR),
9308 TREE_OPERAND (exp, 1));
9310 while (TREE_CODE (incremented) == NOP_EXPR
9311 || TREE_CODE (incremented) == CONVERT_EXPR)
9313 newexp = convert (TREE_TYPE (incremented), newexp);
9314 incremented = TREE_OPERAND (incremented, 0);
9317 temp = expand_assignment (incremented, newexp, ! post && ! ignore);
9318 return post ? op0 : temp;
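/* Source-level view of the POST flag in the return just above
   (illustrative):

	j = i++;	returns the saved old value (op0)
	j = ++i;	returns the incremented value (temp)

   When OP0 was only a copy, the assignment built here increments the
   real lvalue, and for postincrement the copy still holds the old value,
   so it can be returned directly.  */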
9323 /* We have a true reference to the value in OP0.
9324 If there is an insn to add or subtract in this mode, queue it.
9325 Queueing the increment insn avoids the register shuffling
9326 that often results if we must increment now and first save
9327 the old value for subsequent use. */
9329 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9330 op0 = stabilize (op0);
9333 icode = (int) this_optab->handlers[(int) mode].insn_code;
9334 if (icode != (int) CODE_FOR_nothing
9335 /* Make sure that OP0 is valid for operands 0 and 1
9336 of the insn we want to queue. */
9337 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9338 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9340 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9341 op1 = force_reg (mode, op1);
9343 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9345 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9347 rtx addr = (general_operand (XEXP (op0, 0), mode)
9348 ? force_reg (Pmode, XEXP (op0, 0))
9349 : copy_to_reg (XEXP (op0, 0)));
9352 op0 = replace_equiv_address (op0, addr);
9353 temp = force_reg (GET_MODE (op0), op0);
9354 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9355 op1 = force_reg (mode, op1);
9357 /* The increment queue is LIFO, thus we have to `queue'
9358 the instructions in reverse order. */
9359 enqueue_insn (op0, gen_move_insn (op0, temp));
9360 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9365 /* Preincrement, or we can't increment with one simple insn. */
9367 /* Save a copy of the value before inc or dec, to return it later. */
9368 temp = value = copy_to_reg (op0);
9370 /* Arrange to return the incremented value. */
9371 /* Copy the rtx because expand_binop will protect from the queue,
9372 and the results of that would be invalid for us to return
9373 if our caller does emit_queue before using our result. */
9374 temp = copy_rtx (value = op0);
9376 /* Increment however we can. */
9377 op1 = expand_binop (mode, this_optab, value, op1, op0,
9378 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9380 /* Make sure the value is stored into OP0. */
9382 emit_move_insn (op0, op1);
9387 /* Generate code to calculate EXP using a store-flag instruction
9388 and return an rtx for the result. EXP is either a comparison
9389 or a TRUTH_NOT_EXPR whose operand is a comparison.
9391 If TARGET is nonzero, store the result there if convenient.
9393 If ONLY_CHEAP is nonzero, only do this if it is likely to be very cheap.
9396 Return zero if there is no suitable set-flag instruction
9397 available on this machine.
9399 Once expand_expr has been called on the arguments of the comparison,
9400 we are committed to doing the store flag, since it is not safe to
9401 re-evaluate the expression. We emit the store-flag insn by calling
9402 emit_store_flag, but only expand the arguments if we have a reason
9403 to believe that emit_store_flag will be successful. If we think that
9404 it will, but it isn't, we have to simulate the store-flag with a
9405 set/jump/set sequence. */
9408 do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
9411 tree arg0, arg1, type;
9413 enum machine_mode operand_mode;
9417 enum insn_code icode;
9418 rtx subtarget = target;
9421 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9422 result at the end. We can't simply invert the test since it would
9423 have already been inverted if it were valid. This case occurs for
9424 some floating-point comparisons. */
9426 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9427 invert = 1, exp = TREE_OPERAND (exp, 0);
9429 arg0 = TREE_OPERAND (exp, 0);
9430 arg1 = TREE_OPERAND (exp, 1);
9432 /* Don't crash if the comparison was erroneous. */
9433 if (arg0 == error_mark_node || arg1 == error_mark_node)
9436 type = TREE_TYPE (arg0);
9437 operand_mode = TYPE_MODE (type);
9438 unsignedp = TREE_UNSIGNED (type);
9440 /* We won't bother with BLKmode store-flag operations because it would mean
9441 passing a lot of information to emit_store_flag. */
9442 if (operand_mode == BLKmode)
9445 /* We won't bother with store-flag operations involving function pointers
9446 when function pointers must be canonicalized before comparisons. */
9447 #ifdef HAVE_canonicalize_funcptr_for_compare
9448 if (HAVE_canonicalize_funcptr_for_compare
9449 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9450 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9452 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9453 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9454 == FUNCTION_TYPE))))
9461 /* Get the rtx comparison code to use. We know that EXP is a comparison
9462 operation of some type. Some comparisons against 1 and -1 can be
9463 converted to comparisons with zero. Do so here so that the tests
9464 below will be aware that we have a comparison with zero. These
9465 tests will not catch constants in the first operand, but constants
9466 are rarely passed as the first operand. */
9468 switch (TREE_CODE (exp))
9477 if (integer_onep (arg1))
9478 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9480 code = unsignedp ? LTU : LT;
9483 if (! unsignedp && integer_all_onesp (arg1))
9484 arg1 = integer_zero_node, code = LT;
9486 code = unsignedp ? LEU : LE;
9489 if (! unsignedp && integer_all_onesp (arg1))
9490 arg1 = integer_zero_node, code = GE;
9492 code = unsignedp ? GTU : GT;
9495 if (integer_onep (arg1))
9496 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9498 code = unsignedp ? GEU : GE;
9501 case UNORDERED_EXPR:
9527 /* Put a constant second. */
9528 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9530 tem = arg0; arg0 = arg1; arg1 = tem;
9531 code = swap_condition (code);
9534 /* If this is an equality or inequality test of a single bit, we can
9535 do this by shifting the bit being tested to the low-order bit and
9536 masking the result with the constant 1. If the condition was EQ,
9537 we xor it with 1. This does not require an scc insn and is faster
9538 than an scc insn even if we have it.
9540 The code to make this transformation was moved into fold_single_bit_test,
9541 so we just call into the folder and expand its result. */
9543 if ((code == NE || code == EQ)
9544 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9545 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9547 tree type = (*lang_hooks.types.type_for_mode) (mode, unsignedp);
9548 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
9550 target, VOIDmode, EXPAND_NORMAL);
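/* Example of the single-bit rewrite delegated to fold_single_bit_test
   above:

	(x & 8) != 0	becomes		(x >> 3) & 1
	(x & 8) == 0	becomes		((x >> 3) & 1) ^ 1

   The tested bit is shifted down to bit 0 and masked, which needs no scc
   instruction at all.  */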
9553 /* Now see if we are likely to be able to do this. Return if not. */
9554 if (! can_compare_p (code, operand_mode, ccp_store_flag))
9557 icode = setcc_gen_code[(int) code];
9558 if (icode == CODE_FOR_nothing
9559 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
9561 /* We can only do this if it is one of the special cases that
9562 can be handled without an scc insn. */
9563 if ((code == LT && integer_zerop (arg1))
9564 || (! only_cheap && code == GE && integer_zerop (arg1)))
9566 else if (BRANCH_COST >= 0
9567 && ! only_cheap && (code == NE || code == EQ)
9568 && TREE_CODE (type) != REAL_TYPE
9569 && ((abs_optab->handlers[(int) operand_mode].insn_code
9570 != CODE_FOR_nothing)
9571 || (ffs_optab->handlers[(int) operand_mode].insn_code
9572 != CODE_FOR_nothing)))
9578 if (! get_subtarget (target)
9579 || GET_MODE (subtarget) != operand_mode)
9582 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
9585 target = gen_reg_rtx (mode);
9587 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
9588 because, if the emit_store_flag does anything it will succeed and
9589 OP0 and OP1 will not be used subsequently. */
9591 result = emit_store_flag (target, code,
9592 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
9593 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
9594 operand_mode, unsignedp, 1);
9599 result = expand_binop (mode, xor_optab, result, const1_rtx,
9600 result, 0, OPTAB_LIB_WIDEN);
9604 /* If this failed, we have to do this with set/compare/jump/set code. */
9605 if (GET_CODE (target) != REG
9606 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9607 target = gen_reg_rtx (GET_MODE (target));
9609 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9610 result = compare_from_rtx (op0, op1, code, unsignedp,
9611 operand_mode, NULL_RTX);
9612 if (GET_CODE (result) == CONST_INT)
9613 return (((result == const0_rtx && ! invert)
9614 || (result != const0_rtx && invert))
9615 ? const0_rtx : const1_rtx);
9617 /* The code of RESULT may not match CODE if compare_from_rtx
9618 decided to swap its operands and reverse the original code.
9620 We know that compare_from_rtx returns either a CONST_INT or
9621 a new comparison code, so it is safe to just extract the
9622 code from RESULT. */
9623 code = GET_CODE (result);
9625 label = gen_label_rtx ();
9626 if (bcc_gen_fctn[(int) code] == 0)
9629 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
9630 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
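/* In source form, the set/compare/jump/set fallback just emitted is
   (illustrative, shown for invert == 0):

	target = 1;
	if (op0 CODE op1)
	  goto label;
	target = 0;
     label:

   so target ends up 1 exactly when the comparison holds.  */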
9637 /* Stubs in case we haven't got a casesi insn. */
9639 # define HAVE_casesi 0
9640 # define gen_casesi(a, b, c, d, e) (0)
9641 # define CODE_FOR_casesi CODE_FOR_nothing
9644 /* If the machine does not have a case insn that compares the bounds,
9645 this means extra overhead for dispatch tables, which raises the
9646 threshold for using them. */
9647 #ifndef CASE_VALUES_THRESHOLD
9648 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
9649 #endif /* CASE_VALUES_THRESHOLD */
9652 case_values_threshold (void)
9654 return CASE_VALUES_THRESHOLD;
9657 /* Attempt to generate a casesi instruction. Returns 1 if successful,
9658 0 otherwise (i.e. if there is no casesi instruction). */
9660 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
9661 rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
9663 enum machine_mode index_mode = SImode;
9664 int index_bits = GET_MODE_BITSIZE (index_mode);
9665 rtx op1, op2, index;
9666 enum machine_mode op_mode;
9671 /* Convert the index to SImode. */
9672 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
9674 enum machine_mode omode = TYPE_MODE (index_type);
9675 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
9677 /* We must handle the endpoints in the original mode. */
9678 index_expr = build (MINUS_EXPR, index_type,
9679 index_expr, minval);
9680 minval = integer_zero_node;
9681 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9682 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
9683 omode, 1, default_label);
9684 /* Now we can safely truncate. */
9685 index = convert_to_mode (index_mode, index, 0);
9689 if (TYPE_MODE (index_type) != index_mode)
9691 index_expr = convert ((*lang_hooks.types.type_for_size)
9692 (index_bits, 0), index_expr);
9693 index_type = TREE_TYPE (index_expr);
9696 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9699 index = protect_from_queue (index, 0);
9700 do_pending_stack_adjust ();
9702 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
9703 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
9705 index = copy_to_mode_reg (op_mode, index);
9707 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
9709 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
9710 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
9711 op1, TREE_UNSIGNED (TREE_TYPE (minval)));
9712 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
9714 op1 = copy_to_mode_reg (op_mode, op1);
9716 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
9718 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
9719 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
9720 op2, TREE_UNSIGNED (TREE_TYPE (range)));
9721 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
9723 op2 = copy_to_mode_reg (op_mode, op2);
9725 emit_jump_insn (gen_casesi (index, op1, op2,
9726 table_label, default_label));
9730 /* Attempt to generate a tablejump instruction; same concept. */
9731 #ifndef HAVE_tablejump
9732 #define HAVE_tablejump 0
9733 #define gen_tablejump(x, y) (0)
9736 /* Subroutine of the next function.
9738 INDEX is the value being switched on, with the lowest value
9739 in the table already subtracted.
9740 MODE is its expected mode (needed if INDEX is constant).
9741 RANGE is the length of the jump table.
9742 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9744 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9745 index value is out of range. */
9748 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
9753 if (INTVAL (range) > cfun->max_jumptable_ents)
9754 cfun->max_jumptable_ents = INTVAL (range);
9756 /* Do an unsigned comparison (in the proper mode) between the index
9757 expression and the value which represents the length of the range.
9758 Since we just finished subtracting the lower bound of the range
9759 from the index expression, this comparison allows us to simultaneously
9760 check that the original index expression value is both greater than
9761 or equal to the minimum value of the range and less than or equal to
9762 the maximum value of the range. */
9764 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
9767 /* If index is in range, it must fit in Pmode.
9768 Convert to Pmode so we can index with it. */
9770 index = convert_to_mode (Pmode, index, 1);
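/* The range-check trick described above, in source form (illustrative):
   once index = x - lo has been formed, the single unsigned comparison

	if ((unsigned) (x - lo) > (unsigned) (hi - lo))
	  goto default_label;

   rejects both x < lo (the subtraction wraps to a huge unsigned value)
   and x > hi, replacing two signed comparisons with one.  */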
9772 /* Don't let a MEM slip through, because then INDEX that comes
9773 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9774 and break_out_memory_refs will go to work on it and mess it up. */
9775 #ifdef PIC_CASE_VECTOR_ADDRESS
9776 if (flag_pic && GET_CODE (index) != REG)
9777 index = copy_to_mode_reg (Pmode, index);
9780 /* If flag_force_addr were to affect this address
9781 it could interfere with the tricky assumptions made
9782 about addresses that contain label-refs,
9783 which may be valid only very near the tablejump itself. */
9784 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9785 GET_MODE_SIZE, because this indicates how large insns are. The other
9786 uses should all be Pmode, because they are addresses. This code
9787 could fail if addresses and insns are not the same size. */
9788 index = gen_rtx_PLUS (Pmode,
9789 gen_rtx_MULT (Pmode, index,
9790 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9791 gen_rtx_LABEL_REF (Pmode, table_label));
9792 #ifdef PIC_CASE_VECTOR_ADDRESS
9794 index = PIC_CASE_VECTOR_ADDRESS (index);
9797 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9798 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9799 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
9800 RTX_UNCHANGING_P (vector) = 1;
9801 MEM_NOTRAP_P (vector) = 1;
9802 convert_move (temp, vector, 0);
9804 emit_jump_insn (gen_tablejump (temp, table_label));
9806 /* If we are generating PIC code or if the table is PC-relative, the
9807 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9808 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
9813 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
9814 rtx table_label, rtx default_label)
9818 if (! HAVE_tablejump)
9821 index_expr = fold (build (MINUS_EXPR, index_type,
9822 convert (index_type, index_expr),
9823 convert (index_type, minval)));
9824 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9826 index = protect_from_queue (index, 0);
9827 do_pending_stack_adjust ();
9829 do_tablejump (index, TYPE_MODE (index_type),
9830 convert_modes (TYPE_MODE (index_type),
9831 TYPE_MODE (TREE_TYPE (range)),
9832 expand_expr (range, NULL_RTX,
9834 TREE_UNSIGNED (TREE_TYPE (range))),
9835 table_label, default_label);
9839 /* Nonzero if the mode is a valid vector mode for this architecture.
9840 This returns nonzero even if there is no hardware support for the
9841 vector mode, but we can emulate with narrower modes. */
9844 vector_mode_valid_p (enum machine_mode mode)
9846 enum mode_class class = GET_MODE_CLASS (mode);
9847 enum machine_mode innermode;
9849 /* Doh! What's going on? */
9850 if (class != MODE_VECTOR_INT
9851 && class != MODE_VECTOR_FLOAT)
9854 /* Hardware support. Woo hoo! */
9855 if (VECTOR_MODE_SUPPORTED_P (mode))
9858 innermode = GET_MODE_INNER (mode);
9860 /* We should probably return 1 if requesting V4DI and we have no DI,
9861 but do have V2DI; that case, however, is probably very unlikely. */
9863 /* If we have support for the inner mode, we can safely emulate it.
9864 We may not have V2DI, but we can emulate with a pair of DIs. */
9865 return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
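/* Hedged sketch of the emulation claim above, assuming DImode maps to a
   64-bit long long: a V2DImode move can be carried out as two DImode
   moves, so support for the inner mode suffices.

	struct v2di { long long e0, e1; };
	void copy_v2di (struct v2di *d, const struct v2di *s)
	{
	  d->e0 = s->e0;
	  d->e1 = s->e1;
	}
   */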
9868 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
9870 const_vector_from_tree (tree exp)
9875 enum machine_mode inner, mode;
9877 mode = TYPE_MODE (TREE_TYPE (exp));
9879 if (is_zeros_p (exp))
9880 return CONST0_RTX (mode);
9882 units = GET_MODE_NUNITS (mode);
9883 inner = GET_MODE_INNER (mode);
9885 v = rtvec_alloc (units);
9887 link = TREE_VECTOR_CST_ELTS (exp);
9888 for (i = 0; link; link = TREE_CHAIN (link), ++i)
9890 elt = TREE_VALUE (link);
9892 if (TREE_CODE (elt) == REAL_CST)
9893 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
9896 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
9897 TREE_INT_CST_HIGH (elt),
9901 /* Initialize remaining elements to 0. */
9902 for (; i < units; ++i)
9903 RTVEC_ELT (v, i) = CONST0_RTX (inner);
9905 return gen_rtx_raw_CONST_VECTOR (mode, v);
9908 #include "gt-expr.h"