/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "typeclass.h"
#include "langhooks.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "tree-flow.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
  void *constfundata;
  int reverse;
};
static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
						     unsigned int);
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
			      struct move_by_pieces *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned);
static rtx emit_block_move_via_libcall (rtx, rtx, rtx);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
			       struct store_by_pieces *);
static bool clear_storage_via_clrmem (rtx, rtx, unsigned);
static rtx clear_storage_via_libcall (rtx, rtx);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
				     HOST_WIDE_INT, enum machine_mode,
				     tree, tree, int, int);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
			tree, enum machine_mode, int, tree, int);
static rtx var_rtx (tree);

static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);

static int is_aligning_offset (tree, tree);
static void expand_operands (tree, tree, rtx, rtx*, rtx*,
			     enum expand_modifier);
static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
static rtx do_store_flag (tree, rtx, enum machine_mode, int);
#ifdef PUSH_ROUNDING
static void emit_single_push_insn (enum machine_mode, rtx, tree);
#endif
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
static rtx const_vector_from_tree (tree);
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
#endif
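/* Illustrative example (not part of the original source): on a 32-bit
   target where word_mode is SImode and MOVE_RATIO is 3, an aligned
   8-byte copy costs two SImode moves, so MOVE_BY_PIECES_P (8, 32) is
   true and the copy is expanded inline, while a 64-byte copy would
   need 16 insns and falls back to a movmem pattern or a libcall.
   MOVE_RATIO itself is target-defined; the value 3 here is only an
   assumption for the example.  */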
/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero, or
   to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
#endif
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movmem_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrmem_optab[NUM_MACHINE_MODES];

/* These arrays record the insn_code of two different kinds of insns
   to perform block compares.  */
enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
enum insn_code cmpmem_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once (void)
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    REGNO (reg) = regno;

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
	   srcmode = GET_MODE_WIDER_MODE (srcmode))
	{
	  enum insn_code ic;

	  ic = can_extend_p (mode, srcmode, 0);
	  if (ic == CODE_FOR_nothing)
	    continue;

	  PUT_MODE (mem, srcmode);

	  if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
	    float_extend_from_mem[mode][srcmode] = true;
	}
    }
}
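/* Illustrative note (not part of the original source): after this runs,
   float_extend_from_mem[DFmode][SFmode] is true exactly when the
   target's SFmode-to-DFmode extension pattern accepts a MEM source
   operand, so later code can float-extend straight from memory instead
   of loading into a register first.  */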
/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (rtx to, rtx from, int unsignedp)
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
			      : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));

  if (to_real != from_real)
    abort ();
  /* If the source and destination are already the same, then there's
     nothing to do.  */
  if (to == from)
    return;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
	abort ();

      if (VECTOR_MODE_P (to_mode))
	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }
  if (to_real)
    {
      rtx value, insns;
      convert_optab tab;

      if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
	tab = sext_optab;
      else if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
	tab = trunc_optab;
      else
	abort ();

      /* Try converting directly if the insn is supported.  */

      code = tab->handlers[to_mode][from_mode].insn_code;
      if (code != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from,
			  tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
	  return;
	}

      /* Otherwise use a libcall.  */
      libcall = tab->handlers[to_mode][from_mode].libfunc;

      if (!libcall)
	/* This conversion is not implemented yet.  */
	abort ();

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
				       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value,
			  tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
								       from)
			  : gen_rtx_FLOAT_EXTEND (to_mode, from));
      return;
    }
  /* Handle pointer conversion.  */		/* SPEE 900220.  */
  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);

      if (trunc_optab->handlers[to_mode][full_mode].insn_code
	  == CODE_FOR_nothing)
	abort ();

      if (full_mode != from_mode)
	from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
		      to, from, UNKNOWN);
      return;
    }

  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);

      if (sext_optab->handlers[full_mode][from_mode].insn_code
	  == CODE_FOR_nothing)
	abort ();

      emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
		      to, from, UNKNOWN);
      if (to_mode == full_mode)
	return;

      /* else proceed to integer conversions below.  */
      from_mode = full_mode;
    }
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (REG_P (to))
	    {
	      if (reg_overlap_mentioned_p (to, from))
		from = force_reg (from_mode, from);
	      emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	    }
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }
  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  if (flag_force_mem)
	    from = force_not_mem (from);

	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;
	  rtx tmp;
	  tree shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
				      - GET_MODE_BITSIZE (from_mode), 0);
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }
  /* Support special truncate insns for certain modes.  */
  if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
    {
      emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
		      to, from, UNKNOWN);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
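/* Illustrative usage (not part of the original source): widening a
   SImode pseudo into a DImode pseudo with sign extension; unsignedp
   of 0 selects SIGN_EXTEND as the equivalent rtx code:

     rtx src = gen_reg_rtx (SImode);
     rtx dst = gen_reg_rtx (DImode);
     convert_move (dst, src, 0);  */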
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.  */

rtx
convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */

rtx
convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x,
	       int unsignedp)
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do
     is make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }
  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (REG_P (x)
			  && (! HARD_REGISTER_P (x)
			      || HARD_REGNO_MODE_OK (REGNO (x), mode))
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ??? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return gen_int_mode (val, mode);
	}

      return gen_lowpart (mode, x);
    }

  /* Converting from integer constant into mode is always equivalent to a
     subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    {
      if (GET_MODE_BITSIZE (mode) != GET_MODE_BITSIZE (oldmode))
	abort ();
      return simplify_gen_subreg (mode, x, oldmode, 0);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
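/* Illustrative usage (not part of the original source): widening the
   QImode constant -1 as an unsigned value zero-extends it first, so
   the result is (const_int 255) rather than (const_int -1):

     rtx x = convert_modes (SImode, QImode, GEN_INT (-1), 1);  */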
/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))

/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

int
can_move_by_pieces (unsigned HOST_WIDE_INT len,
		    unsigned int align ATTRIBUTE_UNUSED)
{
  return MOVE_BY_PIECES_P (len, align);
}
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */

rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
		unsigned int align, int endp)
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;
  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;
  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();

  if (endp)
    {
      rtx to1;

      if (data.reverse)
	abort ();
      if (data.autinc_to)
	{
	  if (endp == 2)
	    {
	      if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
		emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
	      else
		data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
								-1));
	    }
	  to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
					   data.offset);
	}
      else
	{
	  if (endp == 2)
	    --data.offset;
	  to1 = adjust_address (data.to, QImode, data.offset);
	}
      return to1;
    }
  else
    return data.to;
}
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align)
{
  unsigned HOST_WIDE_INT n_insns = 0;
  unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  if (l)
    abort ();

  return n_insns;
}
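/* Worked example (not part of the original source): with MOVE_MAX of 4
   and sufficient alignment, l == 11 is counted as two SImode moves
   (8 bytes), one HImode move (2 bytes) and one QImode move (1 byte),
   so move_by_pieces_ninsns (11, 32) returns 4.  */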
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
		  struct move_by_pieces *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->to)
	{
	  if (data->autinc_to)
	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
					     data->offset);
	  else
	    to1 = adjust_address (data->to, mode, data->offset);
	}

      if (data->autinc_from)
	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
					   data->offset);
      else
	from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  GEN_INT (-(HOST_WIDE_INT) size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr,
				  GEN_INT (-(HOST_WIDE_INT) size)));

      if (data->to)
	emit_insn ((*genfun) (to1, from1));
      else
	{
#ifdef PUSH_ROUNDING
	  emit_single_push_insn (mode, from1, NULL);
#else
	  abort ();
#endif
	}

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
	 to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      abort ();
    }

  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  if (!MEM_P (x))
    abort ();
  if (!MEM_P (y))
    abort ();
  if (size == 0)
    abort ();

  /* Make sure we've got BLKmode addresses; store_one_arg can decide that
     block copy is more efficient for other large modes, e.g. DCmode.  */
  x = adjust_address (x, BLKmode, 0);
  y = adjust_address (y, BLKmode, 0);

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (GET_CODE (size) == CONST_INT)
    {
      if (INTVAL (size) == 0)
	return 0;

      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, size);
      set_mem_size (y, size);
    }

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movmem (x, y, size, align))
    ;
  else if (may_use_call)
    retval = emit_block_move_via_libcall (x, y, size);
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}
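/* Illustrative usage (not part of the original source): a caller with
   two BLKmode MEMs copies N constant bytes and lets the strategy
   selection above pick move_by_pieces, a movmem pattern, memcpy, or
   the fallback loop:

     emit_block_move (dst_mem, src_mem, GEN_INT (n_bytes),
		      BLOCK_OP_NORMAL);  */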
/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
  /* If arguments are pushed on the stack, then they're safe.  */
  if (PUSH_ARGS)
    return true;

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
  {
    tree fn = emit_block_move_libcall_fn (false);

    if (REG_PARM_STACK_SPACE (fn) != 0)
      return false;
  }
#endif

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
  {
    CUMULATIVE_ARGS args_so_far;
    tree fn, arg;

    fn = emit_block_move_libcall_fn (false);
    INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);

    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
      {
	enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
	rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
	if (!tmp || !REG_P (tmp))
	  return false;
	if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
					NULL_TREE, 1))
	  return false;
	FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
      }
  }
  return true;
}
/* A subroutine of emit_block_move.  Expand a movmem pattern;
   return true if successful.  */

static bool
emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align)
{
  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  int save_volatile_ok = volatile_ok;
  enum machine_mode mode;

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = movmem_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
	     here because if SIZE is less than the mode mask, as it is
	     returned by the macro, it will definitely be less than the
	     actual mode mask.  */
	  && ((GET_CODE (size) == CONST_INT
	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
		   <= (GET_MODE_MASK (mode) >> 1)))
	      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
	      || (*pred) (x, BLKmode))
	  && ((pred = insn_data[(int) code].operand[1].predicate) == 0
	      || (*pred) (y, BLKmode))
	  && ((pred = insn_data[(int) code].operand[3].predicate) == 0
	      || (*pred) (opalign, VOIDmode)))
	{
	  rtx op2;
	  rtx last = get_last_insn ();
	  rtx pat;

	  op2 = convert_to_mode (mode, size, 1);
	  pred = insn_data[(int) code].operand[2].predicate;
	  if (pred != 0 && ! (*pred) (op2, mode))
	    op2 = copy_to_mode_reg (mode, op2);

	  /* ??? When called via emit_block_move_for_call, it'd be
	     nice if there were some way to inform the backend, so
	     that it doesn't fail the expansion because it thinks
	     emitting the libcall would be more efficient.  */

	  pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	  if (pat)
	    {
	      emit_insn (pat);
	      volatile_ok = save_volatile_ok;
	      return true;
	    }
	  else
	    delete_insns_since (last);
	}
    }

  volatile_ok = save_volatile_ok;
  return false;
}
/* A subroutine of emit_block_move.  Expand a call to memcpy.
   Return the return value from memcpy, 0 otherwise.  */

static rtx
emit_block_move_via_libcall (rtx dst, rtx src, rtx size)
{
  rtx dst_addr, src_addr;
  tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* Emit code to copy the addresses of DST and SRC and SIZE into new
     pseudos.  We can then place those new pseudos into a VAR_DECL and
     use them later.  */

  dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));

  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  size_mode = TYPE_MODE (sizetype);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  size_tree = make_tree (sizetype, size);

  fn = emit_block_move_libcall_fn (true);
  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
  arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
  arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);

  /* Now we have to build up the CALL_EXPR itself.  */
  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
  call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
		      call_expr, arg_list, NULL_TREE);

  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);

  /* If we are initializing a readonly value, show the above call clobbered
     it.  Otherwise, a load from it may erroneously be hoisted from a loop, or
     the delay slot scheduler might overlook conflicts and take nasty
     decisions.  */
  if (RTX_UNCHANGING_P (dst))
    add_function_usage_to
      (last_call_insn (), gen_rtx_EXPR_LIST (VOIDmode,
					     gen_rtx_CLOBBER (VOIDmode, dst),
					     NULL_RTX));

  return retval;
}
/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_move_fn;

void
init_block_move_fn (const char *asmspec)
{
  if (!block_move_fn)
    {
      tree args, fn;

      fn = get_identifier ("memcpy");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
				       const_ptr_type_node, sizetype,
				       NULL_TREE);

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;

      block_move_fn = fn;
    }

  if (asmspec)
    {
      SET_DECL_RTL (block_move_fn, NULL_RTX);
      SET_DECL_ASSEMBLER_NAME (block_move_fn, get_identifier (asmspec));
    }
}

static tree
emit_block_move_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_move_fn)
    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_move_fn, NULL);
      assemble_external (block_move_fn);
    }

  return block_move_fn;
}
/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

static void
emit_block_move_via_loop (rtx x, rtx y, rtx size,
			  unsigned int align ATTRIBUTE_UNUSED)
{
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (Pmode, iter_mode, iter, true);
  x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
  y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
			     true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
			   true, top_label);
}
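/* For reference (not part of the original source), the RTL emitted
   above is equivalent to this byte-at-a-time C loop:

     for (iter = 0; iter < size; iter++)
       ((char *) x)[iter] = ((char *) y)[iter];  */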
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (int regno, rtx x, int nregs)
{
  int i;

  if (nregs == 0)
    return;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      rtx last = get_last_insn ();
      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
				    GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
	abort ();

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

rtx
gen_group_rtx (rtx orig)
{
  int i, length;
  rtx *tmps;

  if (GET_CODE (orig) != PARALLEL)
    abort ();

  length = XVECLEN (orig, 0);
  tmps = alloca (sizeof (rtx) * length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  if (i == 1)
    tmps[0] = 0;

  for (; i < length; i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
    }

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}
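/* Illustrative example (not part of the original source): a group of
   registers such as the following PARALLEL describes a 16-byte block
   whose first 8 bytes live in one DImode register and whose second 8
   bytes live in another (the register numbers are made up):

     (parallel [(expr_list (reg:DI 100) (const_int 0))
		(expr_list (reg:DI 101) (const_int 8))])  */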
/* Emit code to move a block ORIG_SRC of type TYPE to a block DST,
   where DST is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_SRC in bytes, or -1
   if not known.  */

void
emit_group_load (rtx dst, rtx orig_src, tree type ATTRIBUTE_UNUSED, int ssize)
{
  rtx *tmps, src;
  int start, i;

  if (GET_CODE (dst) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  /* Arrange to shift the fragment to where it belongs.
	     extract_bit_field loads to the lsb of the reg.  */
	  if (
#ifdef BLOCK_REG_PADDING
	      BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
	      == (BYTES_BIG_ENDIAN ? upward : downward)
#else
	      BYTES_BIG_ENDIAN
#endif
	      )
	    shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	  bytelen = ssize - bytepos;
	  if (bytelen <= 0)
	    abort ();
	}

      /* If we won't be loading directly from memory, protect the real source
	 from strange tricks we might play; but make sure that the source can
	 be loaded directly into the destination.  */
      src = orig_src;
      if (!MEM_P (orig_src)
	  && (!CONSTANT_P (orig_src)
	      || (GET_MODE (orig_src) != mode
		  && GET_MODE (orig_src) != VOIDmode)))
	{
	  if (GET_MODE (orig_src) == VOIDmode)
	    src = gen_reg_rtx (mode);
	  else
	    src = gen_reg_rtx (GET_MODE (orig_src));

	  emit_move_insn (src, orig_src);
	}

      /* Optimize the access just a bit.  */
      if (MEM_P (src)
	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
	      || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	{
	  tmps[i] = gen_reg_rtx (mode);
	  emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
	}
      else if (GET_CODE (src) == CONCAT)
	{
	  unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
	  unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));

	  if ((bytepos == 0 && bytelen == slen0)
	      || (bytepos != 0 && bytepos + bytelen <= slen))
	    {
	      /* The following assumes that the concatenated objects all
		 have the same size.  In this case, a simple calculation
		 can be used to determine the object and the bit field
		 to be extracted.  */
	      tmps[i] = XEXP (src, bytepos / slen0);
	      if (! CONSTANT_P (tmps[i])
		  && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
		tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
					     (bytepos % slen0) * BITS_PER_UNIT,
					     1, NULL_RTX, mode, mode);
	    }
	  else if (bytepos == 0)
	    {
	      rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
	      emit_move_insn (mem, src);
	      tmps[i] = adjust_address (mem, mode, 0);
	    }
	  else
	    abort ();
	}
      /* FIXME: A SIMD parallel will eventually lead to a subreg of a
	 SIMD register, which is currently broken.  While we get GCC
	 to emit proper RTL for these cases, let's dump to memory.  */
      else if (VECTOR_MODE_P (GET_MODE (dst))
	       && REG_P (src))
	{
	  int slen = GET_MODE_SIZE (GET_MODE (src));
	  rtx mem;

	  mem = assign_stack_temp (GET_MODE (src), slen, 0);
	  emit_move_insn (mem, src);
	  tmps[i] = adjust_address (mem, mode, (int) bytepos);
	}
      else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
	       && XVECLEN (dst, 0) > 1)
	tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
      else if (CONSTANT_P (src)
	       || (REG_P (src) && GET_MODE (src) == mode))
	tmps[i] = src;
      else
	tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
				     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
				     mode, mode);

      if (shift)
	tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
				build_int_2 (shift, 0), tmps[i], 0);
    }

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
}
/* Emit code to move a block SRC to block DST, where SRC and DST are
   non-consecutive groups of registers, each represented by a PARALLEL.  */

void
emit_group_move (rtx dst, rtx src)
{
  int i;

  if (GET_CODE (src) != PARALLEL
      || GET_CODE (dst) != PARALLEL
      || XVECLEN (src, 0) != XVECLEN (dst, 0))
    abort ();

  /* Skip first entry if NULL.  */
  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
		    XEXP (XVECEXP (src, 0, i), 0));
}
/* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
   where SRC is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_DST, or -1 if not
   known.  */

void
emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
{
  rtx *tmps, dst;
  int start, i;

  if (GET_CODE (src) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = alloca (sizeof (rtx) * XVECLEN (src, 0));

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      tmps[i] = gen_reg_rtx (GET_MODE (reg));
      emit_move_insn (tmps[i], reg);
    }

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
	 a return statement.  In that case, the dst and src are the same,
	 so no action is necessary.  */
      if (rtx_equal_p (dst, src))
	return;

      /* It is unclear if we can ever reach here, but we may as well handle
	 it.  Allocate a temporary, and split this into a store/load to/from
	 the temporary.  */

      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
      emit_group_store (temp, src, type, ssize);
      emit_group_load (dst, temp, type, ssize);
      return;
    }
  else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
    {
      dst = gen_reg_rtx (GET_MODE (orig_dst));
      /* Make life a bit easier for combine.  */
      emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
    }

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);
      rtx dest = dst;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  /* store_bit_field always takes its value from the lsb.
	     Move the fragment to the lsb if it's not already there.  */
	  if (
#ifdef BLOCK_REG_PADDING
	      BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
	      == (BYTES_BIG_ENDIAN ? upward : downward)
#else
	      BYTES_BIG_ENDIAN
#endif
	      )
	    {
	      int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	      tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
				      build_int_2 (shift, 0), tmps[i], 0);
	    }
	  bytelen = ssize - bytepos;
	}

      if (GET_CODE (dst) == CONCAT)
	{
	  if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
	    dest = XEXP (dst, 0);
	  else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
	    {
	      bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
	      dest = XEXP (dst, 1);
	    }
	  else if (bytepos == 0 && XVECLEN (src, 0))
	    {
	      dest = assign_stack_temp (GET_MODE (dest),
					GET_MODE_SIZE (GET_MODE (dest)), 0);
	      emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
			      tmps[i]);
	      dst = dest;
	      break;
	    }
	  else
	    abort ();
	}

      /* Optimize the access just a bit.  */
      if (MEM_P (dest)
	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
	      || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
      else
	store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
			 mode, tmps[i]);
    }

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (orig_dst != dst)
    emit_move_insn (orig_dst, dst);
}
/* Generate code to copy a BLKmode object of TYPE out of a
   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
   is null, a stack temporary is created.  TGTBLK is returned.

   The purpose of this routine is to handle functions that return
   BLKmode structures in registers.  Some machines (the PA for example)
   want to return all small structures in registers regardless of the
   structure's alignment.  */

rtx
copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
{
  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
  rtx src = NULL, dst = NULL;
  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
  unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;

  if (tgtblk == 0)
    {
      tgtblk = assign_temp (build_qualified_type (type,
						  (TYPE_QUALS (type)
						   | TYPE_QUAL_CONST)),
			    0, 1, 1);
      preserve_temp_slots (tgtblk);
    }

  /* This code assumes srcreg is at least a full word.  If it isn't, copy it
     into a new pseudo which is a full word.  */

  if (GET_MODE (srcreg) != BLKmode
      && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
    srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));

  /* If the structure doesn't take up a whole number of words, see whether
     SRCREG is padded on the left or on the right.  If it's on the left,
     set PADDING_CORRECTION to the number of bits to skip.

     In most ABIs, the structure will be returned at the least significant
     end of the register, which translates to right padding on little-endian
     targets and left padding on big-endian targets.  The opposite
     holds if the structure is returned at the most significant
     end of the register.  */
  if (bytes % UNITS_PER_WORD != 0
      && (targetm.calls.return_in_msb (type)
	  ? !BYTES_BIG_ENDIAN
	  : BYTES_BIG_ENDIAN))
    padding_correction
      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
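/* Worked example (not part of the original source): for a 6-byte
   struct on a 32-bit big-endian target using the default convention
   (structure at the least significant end of the register),
   bytes % UNITS_PER_WORD is 2, so the final word holds 16 useful bits
   and padding_correction is 32 - 16 = 16 bits to skip.  */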
  /* Copy the structure BITSIZE bits at a time.

     We could probably emit more efficient code for machines which do not use
     strict alignment, but it doesn't seem worth the effort at the current
     time.  */
  for (bitpos = 0, xbitpos = padding_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
    {
      /* We need a new source operand each time xbitpos is on a
	 word boundary and when xbitpos == padding_correction
	 (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0
	  || xbitpos == padding_correction)
	src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
				     GET_MODE (srcreg));

      /* We need a new destination operand each time bitpos is on
	 a word boundary.  */
      if (bitpos % BITS_PER_WORD == 0)
	dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);

      /* Use xbitpos for the source extraction (right justified) and
	 bitpos for the destination store (left justified).  */
      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
		       extract_bit_field (src, bitsize,
					  xbitpos % BITS_PER_WORD, 1,
					  NULL_RTX, word_mode, word_mode));
    }

  return tgtblk;
}
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (rtx *call_fusage, rtx reg)
{
  if (!REG_P (reg)
      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    abort ();

  *call_fusage
    = gen_rtx_EXPR_LIST (VOIDmode,
			 gen_rtx_USE (VOIDmode, reg), *call_fusage);
}

/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (rtx *call_fusage, int regno, int nregs)
{
  int i;

  if (regno + nregs > FIRST_PSEUDO_REGISTER)
    abort ();

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, regno_reg_rtx[regno + i]);
}

/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (rtx *call_fusage, rtx regs)
{
  int i;

  for (i = 0; i < XVECLEN (regs, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
	 registers.  This can also be a MEM for targets that pass values
	 partially on the stack and partially in registers.  */
      if (reg != 0 && REG_P (reg))
	use_reg (call_fusage, reg);
    }
}
/* Determine whether the LEN bytes generated by CONSTFUN can be
   stored to memory using several move instructions.  CONSTFUNDATA is
   a pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  Return nonzero if a
   call to store_by_pieces should succeed.  */

int
can_store_by_pieces (unsigned HOST_WIDE_INT len,
		     rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
		     void *constfundata, unsigned int align)
{
  unsigned HOST_WIDE_INT max_size, l;
  HOST_WIDE_INT offset = 0;
  enum machine_mode mode, tmode;
  enum insn_code icode;
  int reverse;
  rtx cst;

  if (len == 0)
    return 1;

  if (! STORE_BY_PIECES_P (len, align))
    return 0;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* We would first store what we can in the largest integer mode, then go to
     successively smaller modes.  */

  for (reverse = 0;
       reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
       reverse++)
    {
      l = len;
      mode = VOIDmode;
      max_size = STORE_MAX_PIECES + 1;
      while (max_size > 1)
	{
	  for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	       tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	    if (GET_MODE_SIZE (tmode) < max_size)
	      mode = tmode;

	  if (mode == VOIDmode)
	    break;

	  icode = mov_optab->handlers[(int) mode].insn_code;
	  if (icode != CODE_FOR_nothing
	      && align >= GET_MODE_ALIGNMENT (mode))
	    {
	      unsigned int size = GET_MODE_SIZE (mode);

	      while (l >= size)
		{
		  if (reverse)
		    offset -= size;

		  cst = (*constfun) (constfundata, offset, mode);
		  if (!LEGITIMATE_CONSTANT_P (cst))
		    return 0;

		  if (!reverse)
		    offset += size;

		  l -= size;
		}
	    }

	  max_size = GET_MODE_SIZE (mode);
	}

      /* The code above should have handled everything.  */
      if (l != 0)
	abort ();
    }

  return 1;
}
/* Generate several move instructions to store LEN bytes generated by
   CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
   pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.
   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */

rtx
store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
		 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
		 void *constfundata, unsigned int align, int endp)
{
  struct store_by_pieces data;

  if (len == 0)
    {
      if (endp == 2)
	abort ();
      return to;
    }

  if (! STORE_BY_PIECES_P (len, align))
    abort ();
  data.constfun = constfun;
  data.constfundata = constfundata;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
  if (endp)
    {
      rtx to1;

      if (data.reverse)
	abort ();
      if (data.autinc_to)
	{
	  if (endp == 2)
	    {
	      if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
		emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
	      else
		data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
								-1));
	    }
	  to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
					   data.offset);
	}
      else
	{
	  if (endp == 2)
	    --data.offset;
	  to1 = adjust_address (data.to, QImode, data.offset);
	}
      return to1;
    }
  else
    return data.to;
}
/* Generate several move instructions to clear LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  ALIGN is maximum alignment we can assume.  */

static void
clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
{
  struct store_by_pieces data;

  if (len == 0)
    return;

  data.constfun = clear_by_pieces_1;
  data.constfundata = NULL;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
}

/* Callback routine for clear_by_pieces.
   Return const0_rtx unconditionally.  */

static rtx
clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
		   HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
		   enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return const0_rtx;
}
2146 /* Subroutine of clear_by_pieces and store_by_pieces.
2147 Generate several move instructions to store LEN bytes of block TO. (A MEM
2148 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2151 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2152 unsigned int align ATTRIBUTE_UNUSED)
2154 rtx to_addr = XEXP (data->to, 0);
2155 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
2156 enum machine_mode mode = VOIDmode, tmode;
2157 enum insn_code icode;
2160 data->to_addr = to_addr;
2162 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2163 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2165 data->explicit_inc_to = 0;
2167 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2169 data->offset = data->len;
2171 /* If storing requires more than two move insns,
2172 copy addresses to registers (to make displacements shorter)
2173 and use post-increment if available. */
2174 if (!data->autinc_to
2175 && move_by_pieces_ninsns (data->len, align) > 2)
2177 /* Determine the main mode we'll be using. */
2178 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2179 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2180 if (GET_MODE_SIZE (tmode) < max_size)
2183 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2185 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2186 data->autinc_to = 1;
2187 data->explicit_inc_to = -1;
2190 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2191 && ! data->autinc_to)
2193 data->to_addr = copy_addr_to_reg (to_addr);
2194 data->autinc_to = 1;
2195 data->explicit_inc_to = 1;
2198 if (!data->autinc_to && CONSTANT_P (to_addr))
2199 data->to_addr = copy_addr_to_reg (to_addr);
2202 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2203 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2204 align = MOVE_MAX * BITS_PER_UNIT;
2206 /* First store what we can in the largest integer mode, then go to
2207 successively smaller modes. */
2209 while (max_size > 1)
2211 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2212 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2213 if (GET_MODE_SIZE (tmode) < max_size)
2216 if (mode == VOIDmode)
2219 icode = mov_optab->handlers[(int) mode].insn_code;
2220 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2221 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2223 max_size = GET_MODE_SIZE (mode);
2226 /* The code above should have handled everything. */
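/* For example, with STORE_MAX_PIECES == 8 and LEN == 11, the loop
   above would typically issue one DImode store, one HImode store and
   one QImode store (8 + 2 + 1 bytes), shrinking max_size after each
   pass; a sketch that assumes all three mov patterns exist and the
   alignment allows them.  */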
2231 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2232 with move instructions for mode MODE. GENFUN is the gen_... function
2233 to make a move insn for that mode. DATA has all the other info. */
2236 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2237 struct store_by_pieces *data)
2239 unsigned int size = GET_MODE_SIZE (mode);
2242 while (data->len >= size)
2245 data->offset -= size;
2247 if (data->autinc_to)
2248 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2251 to1 = adjust_address (data->to, mode, data->offset);
2253 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2254 emit_insn (gen_add2_insn (data->to_addr,
2255 GEN_INT (-(HOST_WIDE_INT) size)));
2257 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2258 emit_insn ((*genfun) (to1, cst));
2260 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2261 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2263 if (! data->reverse)
2264 data->offset += size;
2270 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2271 its length in bytes. */
2274 clear_storage (rtx object, rtx size)
2277 unsigned int align = (MEM_P (object) ? MEM_ALIGN (object)
2278 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2280 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2281 just move a zero. Otherwise, do this a piece at a time. */
2282 if (GET_MODE (object) != BLKmode
2283 && GET_CODE (size) == CONST_INT
2284 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
2285 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2288 if (size == const0_rtx)
2290 else if (GET_CODE (size) == CONST_INT
2291 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2292 clear_by_pieces (object, INTVAL (size), align);
2293 else if (clear_storage_via_clrmem (object, size, align))
2296 retval = clear_storage_via_libcall (object, size);
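/* Usage sketch: zeroing a 16-byte BLKmode MEM looks like

     clear_storage (mem, GEN_INT (16));

   which tries, in order, a single zero move (non-BLKmode only),
   clearing by pieces, a clrmem pattern, and finally a memset
   libcall.  */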
2302 /* A subroutine of clear_storage. Expand a clrmem pattern;
2303 return true if successful. */
2306 clear_storage_via_clrmem (rtx object, rtx size, unsigned int align)
2308 /* Try the most limited insn first, because there's no point
2309 including more than one in the machine description unless
2310 the more limited one has some advantage. */
2312 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2313 enum machine_mode mode;
2315 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2316 mode = GET_MODE_WIDER_MODE (mode))
2318 enum insn_code code = clrmem_optab[(int) mode];
2319 insn_operand_predicate_fn pred;
2321 if (code != CODE_FOR_nothing
2322 /* We don't need MODE to be narrower than
2323 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2324 the mode mask, as it is returned by the macro, it will
2325 definitely be less than the actual mode mask. */
2326 && ((GET_CODE (size) == CONST_INT
2327 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2328 <= (GET_MODE_MASK (mode) >> 1)))
2329 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2330 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2331 || (*pred) (object, BLKmode))
2332 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2333 || (*pred) (opalign, VOIDmode)))
2336 rtx last = get_last_insn ();
2339 op1 = convert_to_mode (mode, size, 1);
2340 pred = insn_data[(int) code].operand[1].predicate;
2341 if (pred != 0 && ! (*pred) (op1, mode))
2342 op1 = copy_to_mode_reg (mode, op1);
2344 pat = GEN_FCN ((int) code) (object, op1, opalign);
2351 delete_insns_since (last);
2358 /* A subroutine of clear_storage. Expand a call to memset.
2359 Return the return value of memset, 0 otherwise. */
2362 clear_storage_via_libcall (rtx object, rtx size)
2364 tree call_expr, arg_list, fn, object_tree, size_tree;
2365 enum machine_mode size_mode;
2368 /* Emit code to copy OBJECT and SIZE into new pseudos, so that we can
2369 wrap the pseudos in tree nodes (via make_tree below) and use them later. */
2371 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2373 size_mode = TYPE_MODE (sizetype);
2374 size = convert_to_mode (size_mode, size, 1);
2375 size = copy_to_mode_reg (size_mode, size);
2377 /* It is incorrect to use the libcall calling conventions to call
2378 memset in this context. This could be a user call to memset and
2379 the user may wish to examine the return value from memset. For
2380 targets where libcalls and normal calls have different conventions
2381 for returning pointers, we could end up generating incorrect code. */
2383 object_tree = make_tree (ptr_type_node, object);
2384 size_tree = make_tree (sizetype, size);
2386 fn = clear_storage_libcall_fn (true);
2387 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2388 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
2389 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2391 /* Now we have to build up the CALL_EXPR itself. */
2392 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2393 call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2394 call_expr, arg_list, NULL_TREE);
2396 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2398 /* If we are initializing a readonly value, show the above call
2399 clobbered it. Otherwise, a load from it may erroneously be
2400 hoisted from a loop. */
2401 if (RTX_UNCHANGING_P (object))
2402 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
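/* The call built above behaves like the C statement

     memset (object, 0, size);

   expanded with the normal (not libcall) calling conventions, so
   that the return value can be handed back to a user who inspects
   what memset returns.  */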
2407 /* A subroutine of clear_storage_via_libcall. Create the tree node
2408 for the function we use for block clears. The first time FOR_CALL
2409 is true, we call assemble_external. */
2411 static GTY(()) tree block_clear_fn;
2414 init_block_clear_fn (const char *asmspec)
2416 if (!block_clear_fn)
2420 fn = get_identifier ("memset");
2421 args = build_function_type_list (ptr_type_node, ptr_type_node,
2422 integer_type_node, sizetype,
2425 fn = build_decl (FUNCTION_DECL, fn, args);
2426 DECL_EXTERNAL (fn) = 1;
2427 TREE_PUBLIC (fn) = 1;
2428 DECL_ARTIFICIAL (fn) = 1;
2429 TREE_NOTHROW (fn) = 1;
2431 block_clear_fn = fn;
2436 SET_DECL_RTL (block_clear_fn, NULL_RTX);
2437 SET_DECL_ASSEMBLER_NAME (block_clear_fn, get_identifier (asmspec));
2442 clear_storage_libcall_fn (int for_call)
2444 static bool emitted_extern;
2446 if (!block_clear_fn)
2447 init_block_clear_fn (NULL);
2449 if (for_call && !emitted_extern)
2451 emitted_extern = true;
2452 make_decl_rtl (block_clear_fn, NULL);
2453 assemble_external (block_clear_fn);
2456 return block_clear_fn;
2459 /* Generate code to copy Y into X.
2460 Both Y and X must have the same mode, except that
2461 Y can be a constant with VOIDmode.
2462 This mode cannot be BLKmode; use emit_block_move for that.
2464 Return the last instruction emitted. */
2467 emit_move_insn (rtx x, rtx y)
2469 enum machine_mode mode = GET_MODE (x);
2470 rtx y_cst = NULL_RTX;
2473 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2479 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
2480 && (last_insn = compress_float_constant (x, y)))
2485 if (!LEGITIMATE_CONSTANT_P (y))
2487 y = force_const_mem (mode, y);
2489 /* If the target's cannot_force_const_mem prevented the spill,
2490 assume that the target's move expanders will also take care
2491 of the non-legitimate constant. */
2497 /* If X or Y are memory references, verify that their addresses are valid for the machine. */
2500 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2501 && ! push_operand (x, GET_MODE (x)))
2503 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2504 x = validize_mem (x);
2507 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2509 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2510 y = validize_mem (y);
2512 if (mode == BLKmode)
2515 last_insn = emit_move_insn_1 (x, y);
2517 if (y_cst && REG_P (x)
2518 && (set = single_set (last_insn)) != NULL_RTX
2519 && SET_DEST (set) == x
2520 && ! rtx_equal_p (y_cst, SET_SRC (set)))
2521 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
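/* Usage sketch: loading a constant into a fresh pseudo is simply

     rtx reg = gen_reg_rtx (SImode);
     emit_move_insn (reg, GEN_INT (42));

   constants that fail LEGITIMATE_CONSTANT_P are forced into the
   constant pool above before the move is emitted.  */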
2526 /* Low level part of emit_move_insn.
2527 Called just like emit_move_insn, but assumes X and Y
2528 are basically valid. */
2531 emit_move_insn_1 (rtx x, rtx y)
2533 enum machine_mode mode = GET_MODE (x);
2534 enum machine_mode submode;
2535 enum mode_class class = GET_MODE_CLASS (mode);
2537 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2540 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2542 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2544 /* Expand complex moves by moving real part and imag part, if possible. */
2545 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2546 && BLKmode != (submode = GET_MODE_INNER (mode))
2547 && (mov_optab->handlers[(int) submode].insn_code
2548 != CODE_FOR_nothing))
2550 /* Don't split destination if it is a stack push. */
2551 int stack = push_operand (x, GET_MODE (x));
2553 #ifdef PUSH_ROUNDING
2554 /* In case we output to the stack, but the size is smaller than the
2555 machine can push exactly, we need to use move instructions. */
2557 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
2558 != GET_MODE_SIZE (submode)))
2561 HOST_WIDE_INT offset1, offset2;
2563 /* Do not use anti_adjust_stack, since we don't want to update
2564 stack_pointer_delta. */
2565 temp = expand_binop (Pmode,
2566 #ifdef STACK_GROWS_DOWNWARD
2574 (GET_MODE_SIZE (GET_MODE (x)))),
2575 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2577 if (temp != stack_pointer_rtx)
2578 emit_move_insn (stack_pointer_rtx, temp);
2580 #ifdef STACK_GROWS_DOWNWARD
2582 offset2 = GET_MODE_SIZE (submode);
2584 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2585 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2586 + GET_MODE_SIZE (submode));
2589 emit_move_insn (change_address (x, submode,
2590 gen_rtx_PLUS (Pmode,
2592 GEN_INT (offset1))),
2593 gen_realpart (submode, y));
2594 emit_move_insn (change_address (x, submode,
2595 gen_rtx_PLUS (Pmode,
2597 GEN_INT (offset2))),
2598 gen_imagpart (submode, y));
2602 /* If this is a stack, push the highpart first, so it
2603 will be in the argument order.
2605 In that case, change_address is used only to convert
2606 the mode, not to change the address. */
2609 /* Note that the real part always precedes the imag part in memory
2610 regardless of the machine's endianness. */
2611 #ifdef STACK_GROWS_DOWNWARD
2612 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2613 gen_imagpart (submode, y));
2614 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2615 gen_realpart (submode, y));
2617 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2618 gen_realpart (submode, y));
2619 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2620 gen_imagpart (submode, y));
2625 rtx realpart_x, realpart_y;
2626 rtx imagpart_x, imagpart_y;
2628 /* If this is a complex value with each part being smaller than a
2629 word, the usual calling sequence will likely pack the pieces into
2630 a single register. Unfortunately, SUBREG of hard registers only
2631 deals in terms of words, so we have a problem converting input
2632 arguments to the CONCAT of two registers that is used elsewhere
2633 for complex values. If this is before reload, we can copy it into
2634 memory and reload. FIXME, we should see about using extract and
2635 insert on integer registers, but complex short and complex char
2636 variables should be rarely used. */
2637 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2638 && (reload_in_progress | reload_completed) == 0)
2641 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2643 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2645 if (packed_dest_p || packed_src_p)
2647 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2648 ? MODE_FLOAT : MODE_INT);
2650 enum machine_mode reg_mode
2651 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2653 if (reg_mode != BLKmode)
2655 rtx mem = assign_stack_temp (reg_mode,
2656 GET_MODE_SIZE (mode), 0);
2657 rtx cmem = adjust_address (mem, mode, 0);
2661 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2663 emit_move_insn_1 (cmem, y);
2664 return emit_move_insn_1 (sreg, mem);
2668 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2670 emit_move_insn_1 (mem, sreg);
2671 return emit_move_insn_1 (x, cmem);
2677 realpart_x = gen_realpart (submode, x);
2678 realpart_y = gen_realpart (submode, y);
2679 imagpart_x = gen_imagpart (submode, x);
2680 imagpart_y = gen_imagpart (submode, y);
2682 /* Show the output dies here. This is necessary for SUBREGs
2683 of pseudos since we cannot track their lifetimes correctly;
2684 hard regs shouldn't appear here except as return values.
2685 We never want to emit such a clobber after reload. */
2687 && ! (reload_in_progress || reload_completed)
2688 && (GET_CODE (realpart_x) == SUBREG
2689 || GET_CODE (imagpart_x) == SUBREG))
2690 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2692 emit_move_insn (realpart_x, realpart_y);
2693 emit_move_insn (imagpart_x, imagpart_y);
2696 return get_last_insn ();
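/* Sketch of the split: an SCmode move with no movsc pattern but a
   movsf pattern becomes two SFmode moves, roughly

     (set (real part of x) (real part of y))
     (set (imag part of x) (imag part of y))

   with gen_realpart/gen_imagpart supplying the halves and a
   (clobber x) emitted first when x is split across SUBREGs.  */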
2699 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
2700 find a mode to do it in. If we have a movcc, use it. Otherwise,
2701 find the MODE_INT mode of the same width. */
2702 else if (GET_MODE_CLASS (mode) == MODE_CC
2703 && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
2705 enum insn_code insn_code;
2706 enum machine_mode tmode = VOIDmode;
2710 && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
2713 for (tmode = QImode; tmode != VOIDmode;
2714 tmode = GET_MODE_WIDER_MODE (tmode))
2715 if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
2718 if (tmode == VOIDmode)
2721 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
2722 may call change_address which is not appropriate if we were
2723 called when a reload was in progress. We don't have to worry
2724 about changing the address since the size in bytes is supposed to
2725 be the same. Copy the MEM to change the mode and move any
2726 substitutions from the old MEM to the new one. */
2728 if (reload_in_progress)
2730 x = gen_lowpart_common (tmode, x1);
2731 if (x == 0 && MEM_P (x1))
2733 x = adjust_address_nv (x1, tmode, 0);
2734 copy_replacements (x1, x);
2737 y = gen_lowpart_common (tmode, y1);
2738 if (y == 0 && MEM_P (y1))
2740 y = adjust_address_nv (y1, tmode, 0);
2741 copy_replacements (y1, y);
2746 x = gen_lowpart (tmode, x);
2747 y = gen_lowpart (tmode, y);
2750 insn_code = mov_optab->handlers[(int) tmode].insn_code;
2751 return emit_insn (GEN_FCN (insn_code) (x, y));
2754 /* Try using a move pattern for the corresponding integer mode. This is
2755 only safe when simplify_subreg can convert MODE constants into integer
2756 constants. At present, it can only do this reliably if the value
2757 fits within a HOST_WIDE_INT. */
2758 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
2759 && (submode = int_mode_for_mode (mode)) != BLKmode
2760 && mov_optab->handlers[submode].insn_code != CODE_FOR_nothing)
2761 return emit_insn (GEN_FCN (mov_optab->handlers[submode].insn_code)
2762 (simplify_gen_subreg (submode, x, mode, 0),
2763 simplify_gen_subreg (submode, y, mode, 0)));
2765 /* This will handle any multi-word or full-word mode that lacks a move_insn
2766 pattern. However, you will get better code if you define such patterns,
2767 even if they must turn into multiple assembler instructions. */
2768 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
2775 #ifdef PUSH_ROUNDING
2777 /* If X is a push on the stack, do the push now and replace
2778 X with a reference to the stack pointer. */
2779 if (push_operand (x, GET_MODE (x)))
2784 /* Do not use anti_adjust_stack, since we don't want to update
2785 stack_pointer_delta. */
2786 temp = expand_binop (Pmode,
2787 #ifdef STACK_GROWS_DOWNWARD
2795 (GET_MODE_SIZE (GET_MODE (x)))),
2796 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2798 if (temp != stack_pointer_rtx)
2799 emit_move_insn (stack_pointer_rtx, temp);
2801 code = GET_CODE (XEXP (x, 0));
2803 /* Just hope that small offsets off SP are OK. */
2804 if (code == POST_INC)
2805 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
2806 GEN_INT (-((HOST_WIDE_INT)
2807 GET_MODE_SIZE (GET_MODE (x)))));
2808 else if (code == POST_DEC)
2809 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
2810 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2812 temp = stack_pointer_rtx;
2814 x = change_address (x, VOIDmode, temp);
2818 /* If we are in reload, see if either operand is a MEM whose address
2819 is scheduled for replacement. */
2820 if (reload_in_progress && MEM_P (x)
2821 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
2822 x = replace_equiv_address_nv (x, inner);
2823 if (reload_in_progress && MEM_P (y)
2824 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
2825 y = replace_equiv_address_nv (y, inner);
2831 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2834 rtx xpart = operand_subword (x, i, 1, mode);
2835 rtx ypart = operand_subword (y, i, 1, mode);
2837 /* If we can't get a part of Y, put Y into memory if it is a
2838 constant. Otherwise, force it into a register. If we still
2839 can't get a part of Y, abort. */
2840 if (ypart == 0 && CONSTANT_P (y))
2842 y = force_const_mem (mode, y);
2843 ypart = operand_subword (y, i, 1, mode);
2845 else if (ypart == 0)
2846 ypart = operand_subword_force (y, i, mode);
2848 if (xpart == 0 || ypart == 0)
2851 need_clobber |= (GET_CODE (xpart) == SUBREG);
2853 last_insn = emit_move_insn (xpart, ypart);
2859 /* Show the output dies here. This is necessary for SUBREGs
2860 of pseudos since we cannot track their lifetimes correctly;
2861 hard regs shouldn't appear here except as return values.
2862 We never want to emit such a clobber after reload. */
2864 && ! (reload_in_progress || reload_completed)
2865 && need_clobber != 0)
2866 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
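/* Sketch: a DImode move on a 32-bit target with no movdi pattern
   falls into the word loop above and becomes two SImode moves via
   operand_subword, e.g.

     (set (subreg:SI x 0) (subreg:SI y 0))
     (set (subreg:SI x 1) (subreg:SI y 1))

   (subreg offsets shown as word numbers for illustration).  */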
2876 /* If Y is representable exactly in a narrower mode, and the target can
2877 perform the extension directly from constant or memory, then emit the
2878 move as an extension. */
2881 compress_float_constant (rtx x, rtx y)
2883 enum machine_mode dstmode = GET_MODE (x);
2884 enum machine_mode orig_srcmode = GET_MODE (y);
2885 enum machine_mode srcmode;
2888 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
2890 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
2891 srcmode != orig_srcmode;
2892 srcmode = GET_MODE_WIDER_MODE (srcmode))
2895 rtx trunc_y, last_insn;
2897 /* Skip if the target can't extend this way. */
2898 ic = can_extend_p (dstmode, srcmode, 0);
2899 if (ic == CODE_FOR_nothing)
2902 /* Skip if the narrowed value isn't exact. */
2903 if (! exact_real_truncate (srcmode, &r))
2906 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
2908 if (LEGITIMATE_CONSTANT_P (trunc_y))
2910 /* Skip if the target needs extra instructions to perform the extension. */
2912 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
2915 else if (float_extend_from_mem[dstmode][srcmode])
2916 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
2920 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
2921 last_insn = get_last_insn ();
2924 set_unique_reg_note (last_insn, REG_EQUAL, y);
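/* Example: storing 1.0 into a DFmode register, when extendsfdf2
   exists and 1.0 survives exact_real_truncate in SFmode, becomes

     (set (reg:DF x) (float_extend:DF (mem:SF <pool>)))

   which loads half as much constant pool data as a DFmode load.
   This is a sketch; the REG_EQUAL note above records the original
   value for later passes.  */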
2932 /* Pushing data onto the stack. */
2934 /* Push a block of length SIZE (perhaps variable)
2935 and return an rtx to address the beginning of the block.
2936 The value may be virtual_outgoing_args_rtx.
2938 EXTRA is the number of bytes of padding to push in addition to SIZE.
2939 BELOW nonzero means this padding comes at low addresses;
2940 otherwise, the padding comes at high addresses. */
2943 push_block (rtx size, int extra, int below)
2947 size = convert_modes (Pmode, ptr_mode, size, 1);
2948 if (CONSTANT_P (size))
2949 anti_adjust_stack (plus_constant (size, extra));
2950 else if (REG_P (size) && extra == 0)
2951 anti_adjust_stack (size);
2954 temp = copy_to_mode_reg (Pmode, size);
2956 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2957 temp, 0, OPTAB_LIB_WIDEN);
2958 anti_adjust_stack (temp);
2961 #ifndef STACK_GROWS_DOWNWARD
2967 temp = virtual_outgoing_args_rtx;
2968 if (extra != 0 && below)
2969 temp = plus_constant (temp, extra);
2973 if (GET_CODE (size) == CONST_INT)
2974 temp = plus_constant (virtual_outgoing_args_rtx,
2975 -INTVAL (size) - (below ? 0 : extra));
2976 else if (extra != 0 && !below)
2977 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2978 negate_rtx (Pmode, plus_constant (size, extra)));
2980 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2981 negate_rtx (Pmode, size));
2984 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
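/* Usage sketch: push_block (GEN_INT (32), 0, 0) anti-adjusts the
   stack by 32 bytes and returns an address for the bottom of the
   block; with a downward-growing stack this is
   (plus virtual_outgoing_args_rtx (const_int -32)).  */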
2987 #ifdef PUSH_ROUNDING
2989 /* Emit single push insn. */
2992 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
2995 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
2997 enum insn_code icode;
2998 insn_operand_predicate_fn pred;
3000 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3001 /* If there is a push pattern, use it. Otherwise try the old way of
3002 throwing a MEM representing the push operation to the move expander. */
3003 icode = push_optab->handlers[(int) mode].insn_code;
3004 if (icode != CODE_FOR_nothing)
3006 if (((pred = insn_data[(int) icode].operand[0].predicate)
3007 && !((*pred) (x, mode))))
3008 x = force_reg (mode, x);
3009 emit_insn (GEN_FCN (icode) (x));
3012 if (GET_MODE_SIZE (mode) == rounded_size)
3013 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3014 /* If we are to pad downward, adjust the stack pointer first and
3015 then store X into the stack location using an offset. This is
3016 because emit_move_insn does not know how to pad; it does not have access to type. */
3018 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3020 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3021 HOST_WIDE_INT offset;
3023 emit_move_insn (stack_pointer_rtx,
3024 expand_binop (Pmode,
3025 #ifdef STACK_GROWS_DOWNWARD
3031 GEN_INT (rounded_size),
3032 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3034 offset = (HOST_WIDE_INT) padding_size;
3035 #ifdef STACK_GROWS_DOWNWARD
3036 if (STACK_PUSH_CODE == POST_DEC)
3037 /* We have already decremented the stack pointer, so get the previous value. */
3039 offset += (HOST_WIDE_INT) rounded_size;
3041 if (STACK_PUSH_CODE == POST_INC)
3042 /* We have already incremented the stack pointer, so get the previous value. */
3044 offset -= (HOST_WIDE_INT) rounded_size;
3046 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3050 #ifdef STACK_GROWS_DOWNWARD
3051 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3052 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3053 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3055 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3056 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3057 GEN_INT (rounded_size));
3059 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3062 dest = gen_rtx_MEM (mode, dest_addr);
3066 set_mem_attributes (dest, type, 1);
3068 if (flag_optimize_sibling_calls)
3069 /* Function incoming arguments may overlap with sibling call
3070 outgoing arguments and we cannot allow reordering of reads
3071 from function arguments with stores to outgoing arguments
3072 of sibling calls. */
3073 set_mem_alias_set (dest, 0);
3075 emit_move_insn (dest, x);
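/* For instance, with STACK_PUSH_CODE == PRE_DEC and no push pattern,
   pushing an SImode register comes out as

     (set (mem:SI (pre_dec:P (reg sp))) (reg:SI x))

   in the common case where the mode needs no rounding or padding;
   the padding paths above fall back to explicit pointer arithmetic.  */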
3079 /* Generate code to push X onto the stack, assuming it has mode MODE and type TYPE.
3081 MODE is redundant except when X is a CONST_INT (since they don't carry mode info).
3083 SIZE is an rtx for the size of data to be copied (in bytes),
3084 needed only if X is BLKmode.
3086 ALIGN (in bits) is maximum alignment we can assume.
3088 If PARTIAL and REG are both nonzero, then copy that many of the first
3089 words of X into registers starting with REG, and push the rest of X.
3090 The amount of space pushed is decreased by PARTIAL words,
3091 rounded *down* to a multiple of PARM_BOUNDARY.
3092 REG must be a hard register in this case.
3093 If REG is zero but PARTIAL is not, take all other actions for an
3094 argument partially in registers, but do not actually load any registers.
3097 EXTRA is the amount in bytes of extra space to leave next to this arg.
3098 This is ignored if an argument block has already been allocated.
3100 On a machine that lacks real push insns, ARGS_ADDR is the address of
3101 the bottom of the argument block for this call. We use indexing off there
3102 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3103 argument block has not been preallocated.
3105 ARGS_SO_FAR is the size of args previously pushed for this call.
3107 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3108 for arguments passed in registers. If nonzero, it will be the number
3109 of bytes required. */
3112 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3113 unsigned int align, int partial, rtx reg, int extra,
3114 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3118 enum direction stack_direction
3119 #ifdef STACK_GROWS_DOWNWARD
3125 /* Decide where to pad the argument: `downward' for below,
3126 `upward' for above, or `none' for don't pad it.
3127 Default is below for small data on big-endian machines; else above. */
3128 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3130 /* Invert direction if stack is post-decrement.
3132 if (STACK_PUSH_CODE == POST_DEC)
3133 if (where_pad != none)
3134 where_pad = (where_pad == downward ? upward : downward);
3138 if (mode == BLKmode)
3140 /* Copy a block into the stack, entirely or partially. */
3143 int used = partial * UNITS_PER_WORD;
3147 if (reg && GET_CODE (reg) == PARALLEL)
3149 /* Use the size of the elt to compute offset. */
3150 rtx elt = XEXP (XVECEXP (reg, 0, 0), 0);
3151 used = partial * GET_MODE_SIZE (GET_MODE (elt));
3152 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3155 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3162 /* USED is now the # of bytes we need not copy to the stack
3163 because registers will take care of them. */
3166 xinner = adjust_address (xinner, BLKmode, used);
3168 /* If the partial register-part of the arg counts in its stack size,
3169 skip the part of stack space corresponding to the registers.
3170 Otherwise, start copying to the beginning of the stack space,
3171 by setting SKIP to 0. */
3172 skip = (reg_parm_stack_space == 0) ? 0 : used;
3174 #ifdef PUSH_ROUNDING
3175 /* Do it with several push insns if that doesn't take lots of insns
3176 and if there is no difficulty with push insns that skip bytes
3177 on the stack for alignment purposes. */
3180 && GET_CODE (size) == CONST_INT
3182 && MEM_ALIGN (xinner) >= align
3183 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3184 /* Here we avoid the case of a structure whose weak alignment
3185 forces many pushes of a small amount of data,
3186 and such small pushes do rounding that causes trouble. */
3187 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3188 || align >= BIGGEST_ALIGNMENT
3189 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3190 == (align / BITS_PER_UNIT)))
3191 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3193 /* Push padding now if padding above and stack grows down,
3194 or if padding below and stack grows up.
3195 But if space already allocated, this has already been done. */
3196 if (extra && args_addr == 0
3197 && where_pad != none && where_pad != stack_direction)
3198 anti_adjust_stack (GEN_INT (extra));
3200 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3203 #endif /* PUSH_ROUNDING */
3207 /* Otherwise make space on the stack and copy the data
3208 to the address of that space. */
3210 /* Deduct words put into registers from the size we must copy. */
3213 if (GET_CODE (size) == CONST_INT)
3214 size = GEN_INT (INTVAL (size) - used);
3216 size = expand_binop (GET_MODE (size), sub_optab, size,
3217 GEN_INT (used), NULL_RTX, 0,
3221 /* Get the address of the stack space.
3222 In this case, we do not deal with EXTRA separately.
3223 A single stack adjust will do. */
3226 temp = push_block (size, extra, where_pad == downward);
3229 else if (GET_CODE (args_so_far) == CONST_INT)
3230 temp = memory_address (BLKmode,
3231 plus_constant (args_addr,
3232 skip + INTVAL (args_so_far)));
3234 temp = memory_address (BLKmode,
3235 plus_constant (gen_rtx_PLUS (Pmode,
3240 if (!ACCUMULATE_OUTGOING_ARGS)
3242 /* If the source is referenced relative to the stack pointer,
3243 copy it to another register to stabilize it. We do not need
3244 to do this if we know that we won't be changing sp. */
3246 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3247 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3248 temp = copy_to_reg (temp);
3251 target = gen_rtx_MEM (BLKmode, temp);
3253 /* We do *not* set_mem_attributes here, because incoming arguments
3254 may overlap with sibling call outgoing arguments and we cannot
3255 allow reordering of reads from function arguments with stores
3256 to outgoing arguments of sibling calls. We do, however, want
3257 to record the alignment of the stack slot. */
3258 /* ALIGN may well be better aligned than TYPE, e.g. due to
3259 PARM_BOUNDARY. Assume the caller isn't lying. */
3260 set_mem_align (target, align);
3262 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3265 else if (partial > 0)
3267 /* Scalar partly in registers. */
3269 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3272 /* # words of start of argument
3273 that we must make space for but need not store. */
3274 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3275 int args_offset = INTVAL (args_so_far);
3278 /* Push padding now if padding above and stack grows down,
3279 or if padding below and stack grows up.
3280 But if space already allocated, this has already been done. */
3281 if (extra && args_addr == 0
3282 && where_pad != none && where_pad != stack_direction)
3283 anti_adjust_stack (GEN_INT (extra));
3285 /* If we make space by pushing it, we might as well push
3286 the real data. Otherwise, we can leave OFFSET nonzero
3287 and leave the space uninitialized. */
3291 /* Now NOT_STACK gets the number of words that we don't need to
3292 allocate on the stack. */
3293 not_stack = partial - offset;
3295 /* If the partial register-part of the arg counts in its stack size,
3296 skip the part of stack space corresponding to the registers.
3297 Otherwise, start copying to the beginning of the stack space,
3298 by setting SKIP to 0. */
3299 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3301 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3302 x = validize_mem (force_const_mem (mode, x));
3304 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3305 SUBREGs of such registers are not allowed. */
3306 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3307 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3308 x = copy_to_reg (x);
3310 /* Loop over all the words allocated on the stack for this arg. */
3311 /* We can do it by words, because any scalar bigger than a word
3312 has a size a multiple of a word. */
3313 #ifndef PUSH_ARGS_REVERSED
3314 for (i = not_stack; i < size; i++)
3316 for (i = size - 1; i >= not_stack; i--)
3318 if (i >= not_stack + offset)
3319 emit_push_insn (operand_subword_force (x, i, mode),
3320 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3322 GEN_INT (args_offset + ((i - not_stack + skip)
3324 reg_parm_stack_space, alignment_pad);
3331 /* Push padding now if padding above and stack grows down,
3332 or if padding below and stack grows up.
3333 But if space already allocated, this has already been done. */
3334 if (extra && args_addr == 0
3335 && where_pad != none && where_pad != stack_direction)
3336 anti_adjust_stack (GEN_INT (extra));
3338 #ifdef PUSH_ROUNDING
3339 if (args_addr == 0 && PUSH_ARGS)
3340 emit_single_push_insn (mode, x, type);
3344 if (GET_CODE (args_so_far) == CONST_INT)
3346 = memory_address (mode,
3347 plus_constant (args_addr,
3348 INTVAL (args_so_far)));
3350 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3352 dest = gen_rtx_MEM (mode, addr);
3354 /* We do *not* set_mem_attributes here, because incoming arguments
3355 may overlap with sibling call outgoing arguments and we cannot
3356 allow reordering of reads from function arguments with stores
3357 to outgoing arguments of sibling calls. We do, however, want
3358 to record the alignment of the stack slot. */
3359 /* ALIGN may well be better aligned than TYPE, e.g. due to
3360 PARM_BOUNDARY. Assume the caller isn't lying. */
3361 set_mem_align (dest, align);
3363 emit_move_insn (dest, x);
3367 /* If part should go in registers, copy that part
3368 into the appropriate registers. Do this now, at the end,
3369 since mem-to-mem copies above may do function calls. */
3370 if (partial > 0 && reg != 0)
3372 /* Handle calls that pass values in multiple non-contiguous locations.
3373 The Irix 6 ABI has examples of this. */
3374 if (GET_CODE (reg) == PARALLEL)
3375 emit_group_load (reg, x, type, -1);
3377 move_block_to_reg (REGNO (reg), x, partial, mode);
3380 if (extra && args_addr == 0 && where_pad == stack_direction)
3381 anti_adjust_stack (GEN_INT (extra));
3383 if (alignment_pad && args_addr == 0)
3384 anti_adjust_stack (alignment_pad);
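/* Worked case for PARTIAL: a three-word argument with partial == 1
   and a hard REG has its remaining words pushed first, and then its
   first word loaded by move_block_to_reg (or emit_group_load for a
   PARALLEL) at the very end, as the final block above does, since
   the mem-to-mem copies may have made function calls.  */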
3387 /* Return X if X can be used as a subtarget in a sequence of arithmetic operations. */
3391 get_subtarget (rtx x)
3394 /* Only registers can be subtargets. */
3396 /* If the register is readonly, it can't be set more than once. */
3397 || RTX_UNCHANGING_P (x)
3398 /* Don't use hard regs to avoid extending their life. */
3399 || REGNO (x) < FIRST_PSEUDO_REGISTER
3400 /* Avoid subtargets inside loops,
3401 since they hide some invariant expressions. */
3402 || preserve_subexpressions_p ())
3406 /* Expand an assignment that stores the value of FROM into TO.
3407 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3408 (If the value is constant, this rtx is a constant.)
3409 Otherwise, the returned value is NULL_RTX. */
3412 expand_assignment (tree to, tree from, int want_value)
3417 /* Don't crash if the lhs of the assignment was erroneous. */
3419 if (TREE_CODE (to) == ERROR_MARK)
3421 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3422 return want_value ? result : NULL_RTX;
3425 /* Assignment of a structure component needs special treatment
3426 if the structure component's rtx is not simply a MEM.
3427 Assignment of an array element at a constant index, and assignment of
3428 an array element in an unaligned packed structure field, have the same problem. */
3431 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3432 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
3433 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
3435 enum machine_mode mode1;
3436 HOST_WIDE_INT bitsize, bitpos;
3444 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3445 &unsignedp, &volatilep);
3447 /* If we are going to use store_bit_field and extract_bit_field,
3448 make sure to_rtx will be safe for multiple use. */
3450 if (mode1 == VOIDmode && want_value)
3451 tem = stabilize_reference (tem);
3453 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3457 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
3459 if (!MEM_P (to_rtx))
3462 #ifdef POINTERS_EXTEND_UNSIGNED
3463 if (GET_MODE (offset_rtx) != Pmode)
3464 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
3466 if (GET_MODE (offset_rtx) != ptr_mode)
3467 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3470 /* A constant address in TO_RTX can have VOIDmode; we must not try
3471 to call force_reg in that case, so avoid it. */
3473 && GET_MODE (to_rtx) == BLKmode
3474 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3476 && (bitpos % bitsize) == 0
3477 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3478 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3480 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3484 to_rtx = offset_address (to_rtx, offset_rtx,
3485 highest_pow2_factor_for_target (to,
3491 /* If the field is at offset zero, we could have been given the
3492 DECL_RTX of the parent struct. Don't munge it. */
3493 to_rtx = shallow_copy_rtx (to_rtx);
3495 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
3498 /* Deal with volatile and readonly fields. The former is only done
3499 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3500 if (volatilep && MEM_P (to_rtx))
3502 if (to_rtx == orig_to_rtx)
3503 to_rtx = copy_rtx (to_rtx);
3504 MEM_VOLATILE_P (to_rtx) = 1;
3507 if (TREE_CODE (to) == COMPONENT_REF
3508 && TREE_READONLY (TREE_OPERAND (to, 1))
3509 /* We can't assert that a MEM won't be set more than once
3510 if the component is not addressable because another
3511 non-addressable component may be referenced by the same MEM. */
3512 && ! (MEM_P (to_rtx) && ! can_address_p (to)))
3514 if (to_rtx == orig_to_rtx)
3515 to_rtx = copy_rtx (to_rtx);
3516 RTX_UNCHANGING_P (to_rtx) = 1;
3519 if (MEM_P (to_rtx) && ! can_address_p (to))
3521 if (to_rtx == orig_to_rtx)
3522 to_rtx = copy_rtx (to_rtx);
3523 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3526 /* Optimize bitfld op= val in certain cases. */
3527 while (mode1 == VOIDmode && !want_value
3528 && bitsize > 0 && bitsize < BITS_PER_WORD
3529 && GET_MODE_BITSIZE (GET_MODE (to_rtx)) <= BITS_PER_WORD
3530 && !TREE_SIDE_EFFECTS (to)
3531 && !TREE_THIS_VOLATILE (to))
3534 rtx value, str_rtx = to_rtx;
3535 HOST_WIDE_INT bitpos1 = bitpos;
3540 if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE
3541 || TREE_CODE_CLASS (TREE_CODE (src)) != '2')
3544 op0 = TREE_OPERAND (src, 0);
3545 op1 = TREE_OPERAND (src, 1);
3548 if (! operand_equal_p (to, op0, 0))
3551 if (MEM_P (str_rtx))
3553 enum machine_mode mode = GET_MODE (str_rtx);
3554 HOST_WIDE_INT offset1;
3556 if (GET_MODE_BITSIZE (mode) == 0
3557 || GET_MODE_BITSIZE (mode) > BITS_PER_WORD)
3559 mode = get_best_mode (bitsize, bitpos1, MEM_ALIGN (str_rtx),
3561 if (mode == VOIDmode)
3565 bitpos1 %= GET_MODE_BITSIZE (mode);
3566 offset1 = (offset1 - bitpos1) / BITS_PER_UNIT;
3567 str_rtx = adjust_address (str_rtx, mode, offset1);
3569 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
3572 /* If the bit field covers the whole REG/MEM, store_field
3573 will likely generate better code. */
3574 if (bitsize >= GET_MODE_BITSIZE (GET_MODE (str_rtx)))
3577 /* We can't handle fields split across multiple entities. */
3578 if (bitpos1 + bitsize > GET_MODE_BITSIZE (GET_MODE (str_rtx)))
3581 if (BYTES_BIG_ENDIAN)
3582 bitpos1 = GET_MODE_BITSIZE (GET_MODE (str_rtx)) - bitpos1
3585 /* Special case some bitfield op= exp. */
3586 switch (TREE_CODE (src))
3590 /* For now, just optimize the case of the topmost bitfield
3591 where we don't need to do any masking and also
3592 1-bit bitfields where xor can be used.
3593 We might win by one instruction for the other bitfields
3594 too if insv/extv instructions aren't used, so that
3595 can be added later. */
3596 if (bitpos1 + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx))
3597 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
3599 value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), 0);
3600 value = convert_modes (GET_MODE (str_rtx),
3601 TYPE_MODE (TREE_TYPE (op1)), value,
3602 TYPE_UNSIGNED (TREE_TYPE (op1)));
3604 /* We may be accessing data outside the field, which means
3605 we can alias adjacent data. */
3606 if (MEM_P (str_rtx))
3608 str_rtx = shallow_copy_rtx (str_rtx);
3609 set_mem_alias_set (str_rtx, 0);
3610 set_mem_expr (str_rtx, 0);
3613 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
3615 && bitpos1 + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
3617 value = expand_and (GET_MODE (str_rtx), value, const1_rtx,
3621 value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx),
3622 value, build_int_2 (bitpos1, 0),
3624 result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
3625 value, str_rtx, 1, OPTAB_WIDEN);
3626 if (result != str_rtx)
3627 emit_move_insn (str_rtx, result);
3639 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3641 /* Spurious cast for HPUX compiler. */
3642 ? ((enum machine_mode)
3643 TYPE_MODE (TREE_TYPE (to)))
3645 unsignedp, TREE_TYPE (tem), get_alias_set (to));
3647 preserve_temp_slots (result);
3651 /* If the value is meaningful, convert RESULT to the proper mode.
3652 Otherwise, return nothing. */
3653 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3654 TYPE_MODE (TREE_TYPE (from)),
3656 TYPE_UNSIGNED (TREE_TYPE (to)))
3660 /* If the rhs is a function call and its value is not an aggregate,
3661 call the function before we start to compute the lhs.
3662 This is needed for correct code for cases such as
3663 val = setjmp (buf) on machines where reference to val
3664 requires loading up part of an address in a separate insn.
3666 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3667 since it might be a promoted variable where the zero- or sign-extension
3668 needs to be done. Handling this in the normal way is safe because no
3669 computation is done before the call. */
3670 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
3671 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3672 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3673 && REG_P (DECL_RTL (to))))
3678 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3680 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3682 /* Handle calls that return values in multiple non-contiguous locations.
3683 The Irix 6 ABI has examples of this. */
3684 if (GET_CODE (to_rtx) == PARALLEL)
3685 emit_group_load (to_rtx, value, TREE_TYPE (from),
3686 int_size_in_bytes (TREE_TYPE (from)));
3687 else if (GET_MODE (to_rtx) == BLKmode)
3688 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
3691 if (POINTER_TYPE_P (TREE_TYPE (to)))
3692 value = convert_memory_address (GET_MODE (to_rtx), value);
3693 emit_move_insn (to_rtx, value);
3695 preserve_temp_slots (to_rtx);
3698 return want_value ? to_rtx : NULL_RTX;
3701 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3702 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3705 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3707 /* Don't move directly into a return register. */
3708 if (TREE_CODE (to) == RESULT_DECL
3709 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
3714 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3716 if (GET_CODE (to_rtx) == PARALLEL)
3717 emit_group_load (to_rtx, temp, TREE_TYPE (from),
3718 int_size_in_bytes (TREE_TYPE (from)));
3720 emit_move_insn (to_rtx, temp);
3722 preserve_temp_slots (to_rtx);
3725 return want_value ? to_rtx : NULL_RTX;
3728 /* In case we are returning the contents of an object which overlaps
3729 the place the value is being stored, use a safe function when copying
3730 a value through a pointer into a structure value return block. */
3731 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3732 && current_function_returns_struct
3733 && !current_function_returns_pcc_struct)
3738 size = expr_size (from);
3739 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
3741 emit_library_call (memmove_libfunc, LCT_NORMAL,
3742 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3743 XEXP (from_rtx, 0), Pmode,
3744 convert_to_mode (TYPE_MODE (sizetype),
3745 size, TYPE_UNSIGNED (sizetype)),
3746 TYPE_MODE (sizetype));
3748 preserve_temp_slots (to_rtx);
3751 return want_value ? to_rtx : NULL_RTX;
3754 /* Compute FROM and store the value in the rtx we got. */
3757 result = store_expr (from, to_rtx, want_value);
3758 preserve_temp_slots (result);
3761 return want_value ? result : NULL_RTX;
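/* Usage sketch: a statement expander lowering "x = y" calls

     expand_assignment (x_tree, y_tree, 0);

   where x_tree and y_tree are hypothetical decl trees; passing
   want_value == 1 instead also returns an rtx usable as the value
   of the enclosing expression.  */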
3764 /* Generate code for computing expression EXP,
3765 and storing the value into TARGET.
3767 If WANT_VALUE & 1 is nonzero, return a copy of the value
3768 not in TARGET, so that we can be sure to use the proper
3769 value in a containing expression even if TARGET has something
3770 else stored in it. If possible, we copy the value through a pseudo
3771 and return that pseudo. Or, if the value is constant, we try to
3772 return the constant. In some cases, we return a pseudo
3773 copied *from* TARGET.
3775 If the mode is BLKmode then we may return TARGET itself.
3776 It turns out that in BLKmode it doesn't cause a problem,
3777 because C has no operators that could combine two different
3778 assignments into the same BLKmode object with different values
3779 with no sequence point. Will other languages need this to be more thorough?
3782 If WANT_VALUE & 1 is 0, we return NULL, to make sure
3783 to catch quickly any cases where the caller uses the value
3784 and fails to set WANT_VALUE.
3786 If WANT_VALUE & 2 is set, this is a store into a call param on the
3787 stack, and block moves may need to be treated specially. */
3790 store_expr (tree exp, rtx target, int want_value)
3793 rtx alt_rtl = NULL_RTX;
3794 int dont_return_target = 0;
3795 int dont_store_target = 0;
3797 if (VOID_TYPE_P (TREE_TYPE (exp)))
3799 /* C++ can generate ?: expressions with a throw expression in one
3800 branch and an rvalue in the other. Here, we resolve attempts to
3801 store the throw expression's nonexistent result. */
3804 expand_expr (exp, const0_rtx, VOIDmode, 0);
3807 if (TREE_CODE (exp) == COMPOUND_EXPR)
3809 /* Perform first part of compound expression, then assign from second part. */
3811 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
3812 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
3813 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3815 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3817 /* For conditional expression, get safe form of the target. Then
3818 test the condition, doing the appropriate assignment on either
3819 side. This avoids the creation of unnecessary temporaries.
3820 For non-BLKmode, it is more efficient not to do this. */
3822 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3824 do_pending_stack_adjust ();
3826 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3827 store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
3828 emit_jump_insn (gen_jump (lab2));
3831 store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
3835 return want_value & 1 ? target : NULL_RTX;
3837 else if ((want_value & 1) != 0
3839 && ! MEM_VOLATILE_P (target)
3840 && GET_MODE (target) != BLKmode)
3841 /* If target is in memory and caller wants value in a register instead,
3842 arrange that. Pass TARGET as target for expand_expr so that,
3843 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3844 We know expand_expr will not use the target in that case.
3845 Don't do this if TARGET is volatile because we are supposed
3846 to write it and then read it. */
3848 temp = expand_expr (exp, target, GET_MODE (target),
3849 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
3850 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3852 /* If TEMP is already in the desired TARGET, only copy it from
3853 memory and don't store it there again. */
3855 || (rtx_equal_p (temp, target)
3856 && ! side_effects_p (temp) && ! side_effects_p (target)))
3857 dont_store_target = 1;
3858 temp = copy_to_reg (temp);
3860 dont_return_target = 1;
3862 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3863 /* If this is a scalar in a register that is stored in a wider mode
3864 than the declared mode, compute the result into its declared mode
3865 and then convert to the wider mode. Our value is the computed expression. */
3868 rtx inner_target = 0;
3870 /* If we don't want a value, we can do the conversion inside EXP,
3871 which will often result in some optimizations. Do the conversion
3872 in two steps: first change the signedness, if needed, then
3873 the extend. But don't do this if the type of EXP is a subtype
3874 of something else since then the conversion might involve
3875 more than just converting modes. */
3876 if ((want_value & 1) == 0
3877 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3878 && TREE_TYPE (TREE_TYPE (exp)) == 0
3879 && (!lang_hooks.reduce_bit_field_operations
3880 || (GET_MODE_PRECISION (GET_MODE (target))
3881 == TYPE_PRECISION (TREE_TYPE (exp)))))
3883 if (TYPE_UNSIGNED (TREE_TYPE (exp))
3884 != SUBREG_PROMOTED_UNSIGNED_P (target))
3886 (lang_hooks.types.signed_or_unsigned_type
3887 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
3889 exp = convert (lang_hooks.types.type_for_mode
3890 (GET_MODE (SUBREG_REG (target)),
3891 SUBREG_PROMOTED_UNSIGNED_P (target)),
3894 inner_target = SUBREG_REG (target);
3897 temp = expand_expr (exp, inner_target, VOIDmode,
3898 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
3900 /* If TEMP is a MEM and we want a result value, make the access
3901 now so it gets done only once. Strictly speaking, this is
3902 only necessary if the MEM is volatile, or if the address
3903 overlaps TARGET. But not performing the load twice also
3904 reduces the amount of rtl we generate and then have to CSE. */
3905 if (MEM_P (temp) && (want_value & 1) != 0)
3906 temp = copy_to_reg (temp);
3908 /* If TEMP is a VOIDmode constant, use convert_modes to make
3909 sure that we properly convert it. */
3910 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3912 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3913 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
3914 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3915 GET_MODE (target), temp,
3916 SUBREG_PROMOTED_UNSIGNED_P (target));
3919 convert_move (SUBREG_REG (target), temp,
3920 SUBREG_PROMOTED_UNSIGNED_P (target));
3922 /* If we promoted a constant, change the mode back down to match
3923 target. Otherwise, the caller might get confused by a result whose
3924 mode is larger than expected. */
3926 if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
3928 if (GET_MODE (temp) != VOIDmode)
3930 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
3931 SUBREG_PROMOTED_VAR_P (temp) = 1;
3932 SUBREG_PROMOTED_UNSIGNED_SET (temp,
3933 SUBREG_PROMOTED_UNSIGNED_P (target));
3936 temp = convert_modes (GET_MODE (target),
3937 GET_MODE (SUBREG_REG (target)),
3938 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
3941 return want_value & 1 ? temp : NULL_RTX;
3945 temp = expand_expr_real (exp, target, GET_MODE (target),
3947 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
3949 /* Return TARGET if it's a specified hardware register.
3950 If TARGET is a volatile mem ref, either return TARGET
3951 or return a reg copied *from* TARGET; ANSI requires this.
3953 Otherwise, if TEMP is not TARGET, return TEMP
3954 if it is constant (for efficiency),
3955 or if we really want the correct value. */
3956 if (!(target && REG_P (target)
3957 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3958 && !(MEM_P (target) && MEM_VOLATILE_P (target))
3959 && ! rtx_equal_p (temp, target)
3960 && (CONSTANT_P (temp) || (want_value & 1) != 0))
3961 dont_return_target = 1;
3964 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3965 the same as that of TARGET, adjust the constant. This is needed, for
3966 example, in case it is a CONST_DOUBLE and we want only a word-sized value. */
3968 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3969 && TREE_CODE (exp) != ERROR_MARK
3970 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3971 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3972 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
3974 /* If value was not generated in the target, store it there.
3975 Convert the value to TARGET's type first if necessary and emit the
3976 pending incrementations that have been queued when expanding EXP.
3977 Note that we cannot emit the whole queue blindly because this will
3978 effectively disable the POST_INC optimization later.
3980 If TEMP and TARGET compare equal according to rtx_equal_p, but
3981 one or both of them are volatile memory refs, we have to distinguish
3983 - expand_expr has used TARGET. In this case, we must not generate
3984 another copy. This can be detected by TARGET being equal according to ==.
3986 - expand_expr has not used TARGET - that means that the source just
3987 happens to have the same RTX form. Since temp will have been created
3988 by expand_expr, it will compare unequal according to == .
3989 We must generate a copy in this case, to reach the correct number
3990 of volatile memory references. */
3992 if ((! rtx_equal_p (temp, target)
3993 || (temp != target && (side_effects_p (temp)
3994 || side_effects_p (target))))
3995 && TREE_CODE (exp) != ERROR_MARK
3996 && ! dont_store_target
3997 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
3998 but TARGET is not a valid memory reference, TEMP will differ
3999 from TARGET although it is really the same location. */
4000 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4001 /* If there's nothing to copy, don't bother. Don't call expr_size
4002 unless necessary, because some front ends' (C++) expr_size hook
4003 aborts on objects that are not supposed to be bit-copied or bit-initialized. */
4005 && expr_size (exp) != const0_rtx)
4007 if (GET_MODE (temp) != GET_MODE (target)
4008 && GET_MODE (temp) != VOIDmode)
4010 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4011 if (dont_return_target)
4013 /* In this case, we will return TEMP,
4014 so make sure it has the proper mode.
4015 But don't forget to store the value into TARGET. */
4016 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4017 emit_move_insn (target, temp);
4020 convert_move (target, temp, unsignedp);
4023 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4025 /* Handle copying a string constant into an array. The string
4026 constant may be shorter than the array. So copy just the string's
4027 actual length, and clear the rest. First get the size of the data
4028 type of the string, which is actually the size of the target. */
4029 rtx size = expr_size (exp);
4031 if (GET_CODE (size) == CONST_INT
4032 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4033 emit_block_move (target, temp, size,
4035 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4038 /* Compute the size of the data to copy from the string. */
4040 = size_binop (MIN_EXPR,
4041 make_tree (sizetype, size),
4042 size_int (TREE_STRING_LENGTH (exp)));
4044 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4046 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4049 /* Copy that much. */
4050 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4051 TYPE_UNSIGNED (sizetype));
4052 emit_block_move (target, temp, copy_size_rtx,
4054 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4056 /* Figure out how much is left in TARGET that we have to clear.
4057 Do all calculations in ptr_mode. */
4058 if (GET_CODE (copy_size_rtx) == CONST_INT)
4060 size = plus_constant (size, -INTVAL (copy_size_rtx));
4061 target = adjust_address (target, BLKmode,
4062 INTVAL (copy_size_rtx));
4066 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4067 copy_size_rtx, NULL_RTX, 0,
4070 #ifdef POINTERS_EXTEND_UNSIGNED
4071 if (GET_MODE (copy_size_rtx) != Pmode)
4072 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4073 TYPE_UNSIGNED (sizetype));
4076 target = offset_address (target, copy_size_rtx,
4077 highest_pow2_factor (copy_size));
4078 label = gen_label_rtx ();
4079 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4080 GET_MODE (size), 0, label);
4083 if (size != const0_rtx)
4084 clear_storage (target, size);
4090 /* Handle calls that return values in multiple non-contiguous locations.
4091 The Irix 6 ABI has examples of this. */
4092 else if (GET_CODE (target) == PARALLEL)
4093 emit_group_load (target, temp, TREE_TYPE (exp),
4094 int_size_in_bytes (TREE_TYPE (exp)));
4095 else if (GET_MODE (temp) == BLKmode)
4096 emit_block_move (target, temp, expr_size (exp),
4098 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4101 temp = force_operand (temp, target);
4103 emit_move_insn (target, temp);
4107 /* If we don't want a value, return NULL_RTX. */
4108 if ((want_value & 1) == 0)
4111 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4112 ??? The latter test doesn't seem to make sense. */
4113 else if (dont_return_target && !MEM_P (temp))
4116 /* Return TARGET itself if it is a hard register. */
4117 else if ((want_value & 1) != 0
4118 && GET_MODE (target) != BLKmode
4119 && ! (REG_P (target)
4120 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4121 return copy_to_reg (target);
4127 /* Examine CTOR. Discover how many scalar fields are set to nonzero
4128 values and place the count in *P_NZ_ELTS. Discover how many scalar fields
4129 are set to non-constant values and place the count in *P_NC_ELTS. */
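/* For illustration (hypothetical constructor): for { 1, 0, n } with n a
   variable, two scalars are not known to be zero (1 and n), so *P_NZ_ELTS
   receives 2, and one scalar is non-constant (n), so *P_NC_ELTS
   receives 1.  */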
4132 categorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts,
4133 HOST_WIDE_INT *p_nc_elts)
4135 HOST_WIDE_INT nz_elts, nc_elts;
4141 for (list = CONSTRUCTOR_ELTS (ctor); list; list = TREE_CHAIN (list))
4143 tree value = TREE_VALUE (list);
4144 tree purpose = TREE_PURPOSE (list);
4148 if (TREE_CODE (purpose) == RANGE_EXPR)
4150 tree lo_index = TREE_OPERAND (purpose, 0);
4151 tree hi_index = TREE_OPERAND (purpose, 1);
4153 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4154 mult = (tree_low_cst (hi_index, 1)
4155 - tree_low_cst (lo_index, 1) + 1);
4158 switch (TREE_CODE (value))
4162 HOST_WIDE_INT nz = 0, nc = 0;
4163 categorize_ctor_elements_1 (value, &nz, &nc);
4164 nz_elts += mult * nz;
4165 nc_elts += mult * nc;
4171 if (!initializer_zerop (value))
4175 if (!initializer_zerop (TREE_REALPART (value)))
4177 if (!initializer_zerop (TREE_IMAGPART (value)))
4183 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4184 if (!initializer_zerop (TREE_VALUE (v)))
4191 if (!initializer_constant_valid_p (value, TREE_TYPE (value)))
4197 *p_nz_elts += nz_elts;
4198 *p_nc_elts += nc_elts;
4202 categorize_ctor_elements (tree ctor, HOST_WIDE_INT *p_nz_elts,
4203 HOST_WIDE_INT *p_nc_elts)
4207 categorize_ctor_elements_1 (ctor, p_nz_elts, p_nc_elts);
4210 /* Count the number of scalars in TYPE. Return -1 on overflow or if the type is variable-sized. */
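/* For illustration: an ARRAY_TYPE such as int[10] counts as 10 scalars,
   and a RECORD_TYPE such as struct { int x; double y; } counts as 2,
   one per scalar FIELD_DECL.  */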
4214 count_type_elements (tree type)
4216 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
4217 switch (TREE_CODE (type))
4221 tree telts = array_type_nelts (type);
4222 if (telts && host_integerp (telts, 1))
4224 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
4225 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type));
4228 else if (max / n > m)
4236 HOST_WIDE_INT n = 0, t;
4239 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4240 if (TREE_CODE (f) == FIELD_DECL)
4242 t = count_type_elements (TREE_TYPE (f));
4252 case QUAL_UNION_TYPE:
4254 /* Ho hum. How in the world do we guess here? Clearly it isn't
4255 right to count the fields. Guess based on the number of words. */
4256 HOST_WIDE_INT n = int_size_in_bytes (type);
4259 return n / UNITS_PER_WORD;
4266 return TYPE_VECTOR_SUBPARTS (type);
4275 case REFERENCE_TYPE:
4289 /* Return 1 if EXP contains mostly (3/4) zeros. */
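/* For illustration: int a[8] initialized as { 0, 0, 7 } has one nonzero
   scalar out of eight, and 1 < 8/4, so the predicate holds.  Note that
   the comparison below is strict: one nonzero element out of four
   (1 < 4/4) does not qualify.  */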
4292 mostly_zeros_p (tree exp)
4294 if (TREE_CODE (exp) == CONSTRUCTOR)
4297 HOST_WIDE_INT nz_elts, nc_elts, elts;
4299 /* If there are no ranges of true bits, it is all zero. */
4300 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4301 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4303 categorize_ctor_elements (exp, &nz_elts, &nc_elts);
4304 elts = count_type_elements (TREE_TYPE (exp));
4306 return nz_elts < elts / 4;
4309 return initializer_zerop (exp);
4312 /* Helper function for store_constructor.
4313 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4314 TYPE is the type of the CONSTRUCTOR, not the element type.
4315 CLEARED is as for store_constructor.
4316 ALIAS_SET is the alias set to use for any stores.
4318 This provides a recursive shortcut back to store_constructor when it isn't
4319 necessary to go through store_field. This is so that we can pass through
4320 the cleared field to let store_constructor know that we may not have to
4321 clear a substructure if the outer structure has already been cleared. */
4324 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4325 HOST_WIDE_INT bitpos, enum machine_mode mode,
4326 tree exp, tree type, int cleared, int alias_set)
4328 if (TREE_CODE (exp) == CONSTRUCTOR
4329 /* We can only call store_constructor recursively if the size and
4330 bit position are on a byte boundary. */
4331 && bitpos % BITS_PER_UNIT == 0
4332 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
4333 /* If we have a nonzero bitpos for a register target, then we just
4334 let store_field do the bitfield handling. This is unlikely to
4335 generate unnecessary clear instructions anyways. */
4336 && (bitpos == 0 || MEM_P (target)))
4340 = adjust_address (target,
4341 GET_MODE (target) == BLKmode
4343 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4344 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4347 /* Update the alias set, if required. */
4348 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
4349 && MEM_ALIAS_SET (target) != 0)
4351 target = copy_rtx (target);
4352 set_mem_alias_set (target, alias_set);
4355 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4358 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4362 /* Store the value of constructor EXP into the rtx TARGET.
4363 TARGET is either a REG or a MEM; we know it cannot conflict, since
4364 safe_from_p has been called.
4365 CLEARED is true if TARGET is known to have been zero'd.
4366 SIZE is the number of bytes of TARGET we are allowed to modify: this
4367 may not be the same as the size of EXP if we are assigning to a field
4368 which has been packed to exclude padding bits. */
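/* A sketch of the strategy, for a hypothetical
   "struct S { int a, b, c, d; } s = { 1 };": the constructor names fewer
   fields than the type has, so the whole of TARGET is cleared first via
   clear_storage, and then only the explicit initializer for 'a' is
   stored through store_constructor_field with CLEARED set.  */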
4371 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4373 tree type = TREE_TYPE (exp);
4374 #ifdef WORD_REGISTER_OPERATIONS
4375 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4378 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4379 || TREE_CODE (type) == QUAL_UNION_TYPE)
4383 /* If size is zero or the target is already cleared, do nothing. */
4384 if (size == 0 || cleared)
4386 /* We either clear the aggregate or indicate the value is dead. */
4387 else if ((TREE_CODE (type) == UNION_TYPE
4388 || TREE_CODE (type) == QUAL_UNION_TYPE)
4389 && ! CONSTRUCTOR_ELTS (exp))
4390 /* If the constructor is empty, clear the union. */
4392 clear_storage (target, expr_size (exp));
4396 /* If we are building a static constructor into a register,
4397 set the initial value as zero so we can fold the value into
4398 a constant. But if more than one register is involved,
4399 this probably loses. */
4400 else if (REG_P (target) && TREE_STATIC (exp)
4401 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4403 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4407 /* If the constructor has fewer fields than the structure
4408 or if we are initializing the structure to mostly zeros,
4409 clear the whole structure first. Don't do this if TARGET is a
4410 register whose mode size isn't equal to SIZE since clear_storage
4411 can't handle this case. */
4413 && ((list_length (CONSTRUCTOR_ELTS (exp)) != fields_length (type))
4414 || mostly_zeros_p (exp))
4416 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4419 rtx xtarget = target;
4421 if (readonly_fields_p (type))
4423 xtarget = copy_rtx (xtarget);
4424 RTX_UNCHANGING_P (xtarget) = 1;
4427 clear_storage (xtarget, GEN_INT (size));
4432 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4434 /* Store each element of the constructor into
4435 the corresponding field of TARGET. */
4437 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4439 tree field = TREE_PURPOSE (elt);
4440 tree value = TREE_VALUE (elt);
4441 enum machine_mode mode;
4442 HOST_WIDE_INT bitsize;
4443 HOST_WIDE_INT bitpos = 0;
4445 rtx to_rtx = target;
4447 /* Just ignore missing fields.
4448 We cleared the whole structure, above,
4449 if any fields are missing. */
4453 if (cleared && initializer_zerop (value))
4456 if (host_integerp (DECL_SIZE (field), 1))
4457 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4461 mode = DECL_MODE (field);
4462 if (DECL_BIT_FIELD (field))
4465 offset = DECL_FIELD_OFFSET (field);
4466 if (host_integerp (offset, 0)
4467 && host_integerp (bit_position (field), 0))
4469 bitpos = int_bit_position (field);
4473 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4480 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
4481 make_tree (TREE_TYPE (exp),
4484 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4485 if (!MEM_P (to_rtx))
4488 #ifdef POINTERS_EXTEND_UNSIGNED
4489 if (GET_MODE (offset_rtx) != Pmode)
4490 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4492 if (GET_MODE (offset_rtx) != ptr_mode)
4493 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4496 to_rtx = offset_address (to_rtx, offset_rtx,
4497 highest_pow2_factor (offset));
4500 if (TREE_READONLY (field))
4503 to_rtx = copy_rtx (to_rtx);
4505 RTX_UNCHANGING_P (to_rtx) = 1;
4508 #ifdef WORD_REGISTER_OPERATIONS
4509 /* If this initializes a field that is smaller than a word, at the
4510 start of a word, try to widen it to a full word.
4511 This special case allows us to output C++ member function
4512 initializations in a form that the optimizers can understand. */
4514 && bitsize < BITS_PER_WORD
4515 && bitpos % BITS_PER_WORD == 0
4516 && GET_MODE_CLASS (mode) == MODE_INT
4517 && TREE_CODE (value) == INTEGER_CST
4519 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4521 tree type = TREE_TYPE (value);
4523 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4525 type = lang_hooks.types.type_for_size
4526 (BITS_PER_WORD, TYPE_UNSIGNED (type));
4527 value = convert (type, value);
4530 if (BYTES_BIG_ENDIAN)
4532 = fold (build2 (LSHIFT_EXPR, type, value,
4533 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4534 bitsize = BITS_PER_WORD;
4539 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4540 && DECL_NONADDRESSABLE_P (field))
4542 to_rtx = copy_rtx (to_rtx);
4543 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4546 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4547 value, type, cleared,
4548 get_alias_set (TREE_TYPE (field)));
4552 else if (TREE_CODE (type) == ARRAY_TYPE)
4558 tree elttype = TREE_TYPE (type);
4560 HOST_WIDE_INT minelt = 0;
4561 HOST_WIDE_INT maxelt = 0;
4563 domain = TYPE_DOMAIN (type);
4564 const_bounds_p = (TYPE_MIN_VALUE (domain)
4565 && TYPE_MAX_VALUE (domain)
4566 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4567 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4569 /* If we have constant bounds for the range of the type, get them. */
4572 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4573 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4576 /* If the constructor has fewer elements than the array,
4577 clear the whole array first. Similarly if this is a
4578 static constructor of a non-BLKmode object. */
4581 else if (REG_P (target) && TREE_STATIC (exp))
4585 HOST_WIDE_INT count = 0, zero_count = 0;
4586 need_to_clear = ! const_bounds_p;
4588 /* This loop is a more accurate version of the loop in
4589 mostly_zeros_p (it handles RANGE_EXPR in an index).
4590 It is also needed to check for missing elements. */
4591 for (elt = CONSTRUCTOR_ELTS (exp);
4592 elt != NULL_TREE && ! need_to_clear;
4593 elt = TREE_CHAIN (elt))
4595 tree index = TREE_PURPOSE (elt);
4596 HOST_WIDE_INT this_node_count;
4598 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4600 tree lo_index = TREE_OPERAND (index, 0);
4601 tree hi_index = TREE_OPERAND (index, 1);
4603 if (! host_integerp (lo_index, 1)
4604 || ! host_integerp (hi_index, 1))
4610 this_node_count = (tree_low_cst (hi_index, 1)
4611 - tree_low_cst (lo_index, 1) + 1);
4614 this_node_count = 1;
4616 count += this_node_count;
4617 if (mostly_zeros_p (TREE_VALUE (elt)))
4618 zero_count += this_node_count;
4621 /* Clear the entire array first if there are any missing elements,
4622 or if the incidence of zero elements is >= 75%. */
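/* The multiply form avoids division: 4 * zero_count >= 3 * count is
   equivalent to zero_count / count >= 3/4.  E.g. count == 8 and
   zero_count == 6 gives 24 >= 24, so the array is cleared first.  */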
4624 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4628 if (need_to_clear && size > 0)
4631 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4633 clear_storage (target, GEN_INT (size));
4637 if (!cleared && REG_P (target))
4638 /* Inform later passes that the old value is dead. */
4639 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4641 /* Store each element of the constructor into
4642 the corresponding element of TARGET, determined
4643 by counting the elements. */
4644 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4646 elt = TREE_CHAIN (elt), i++)
4648 enum machine_mode mode;
4649 HOST_WIDE_INT bitsize;
4650 HOST_WIDE_INT bitpos;
4652 tree value = TREE_VALUE (elt);
4653 tree index = TREE_PURPOSE (elt);
4654 rtx xtarget = target;
4656 if (cleared && initializer_zerop (value))
4659 unsignedp = TYPE_UNSIGNED (elttype);
4660 mode = TYPE_MODE (elttype);
4661 if (mode == BLKmode)
4662 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4663 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4666 bitsize = GET_MODE_BITSIZE (mode);
4668 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4670 tree lo_index = TREE_OPERAND (index, 0);
4671 tree hi_index = TREE_OPERAND (index, 1);
4672 rtx index_r, pos_rtx;
4673 HOST_WIDE_INT lo, hi, count;
4676 /* If the range is constant and "small", unroll the loop. */
4678 && host_integerp (lo_index, 0)
4679 && host_integerp (hi_index, 0)
4680 && (lo = tree_low_cst (lo_index, 0),
4681 hi = tree_low_cst (hi_index, 0),
4682 count = hi - lo + 1,
4685 || (host_integerp (TYPE_SIZE (elttype), 1)
4686 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4689 lo -= minelt; hi -= minelt;
4690 for (; lo <= hi; lo++)
4692 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4695 && !MEM_KEEP_ALIAS_SET_P (target)
4696 && TREE_CODE (type) == ARRAY_TYPE
4697 && TYPE_NONALIASED_COMPONENT (type))
4699 target = copy_rtx (target);
4700 MEM_KEEP_ALIAS_SET_P (target) = 1;
4703 store_constructor_field
4704 (target, bitsize, bitpos, mode, value, type, cleared,
4705 get_alias_set (elttype));
4710 rtx loop_start = gen_label_rtx ();
4711 rtx loop_end = gen_label_rtx ();
4714 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4715 unsignedp = TYPE_UNSIGNED (domain);
4717 index = build_decl (VAR_DECL, NULL_TREE, domain);
4720 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4722 SET_DECL_RTL (index, index_r);
4723 store_expr (lo_index, index_r, 0);
4725 /* Build the head of the loop. */
4726 do_pending_stack_adjust ();
4727 emit_label (loop_start);
4729 /* Assign value to element index. */
4731 = convert (ssizetype,
4732 fold (build2 (MINUS_EXPR, TREE_TYPE (index),
4733 index, TYPE_MIN_VALUE (domain))));
4734 position = size_binop (MULT_EXPR, position,
4736 TYPE_SIZE_UNIT (elttype)));
4738 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4739 xtarget = offset_address (target, pos_rtx,
4740 highest_pow2_factor (position));
4741 xtarget = adjust_address (xtarget, mode, 0);
4742 if (TREE_CODE (value) == CONSTRUCTOR)
4743 store_constructor (value, xtarget, cleared,
4744 bitsize / BITS_PER_UNIT);
4746 store_expr (value, xtarget, 0);
4748 /* Generate a conditional jump to exit the loop. */
4749 exit_cond = build2 (LT_EXPR, integer_type_node,
4751 jumpif (exit_cond, loop_end);
4753 /* Update the loop counter, and jump to the head of the loop. */
4755 expand_assignment (index,
4756 build2 (PLUS_EXPR, TREE_TYPE (index),
4757 index, integer_one_node), 0);
4759 emit_jump (loop_start);
4761 /* Build the end of the loop. */
4762 emit_label (loop_end);
4765 else if ((index != 0 && ! host_integerp (index, 0))
4766 || ! host_integerp (TYPE_SIZE (elttype), 1))
4771 index = ssize_int (1);
4774 index = fold_convert (ssizetype,
4775 fold (build2 (MINUS_EXPR,
4778 TYPE_MIN_VALUE (domain))));
4780 position = size_binop (MULT_EXPR, index,
4782 TYPE_SIZE_UNIT (elttype)));
4783 xtarget = offset_address (target,
4784 expand_expr (position, 0, VOIDmode, 0),
4785 highest_pow2_factor (position));
4786 xtarget = adjust_address (xtarget, mode, 0);
4787 store_expr (value, xtarget, 0);
4792 bitpos = ((tree_low_cst (index, 0) - minelt)
4793 * tree_low_cst (TYPE_SIZE (elttype), 1));
4795 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4797 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
4798 && TREE_CODE (type) == ARRAY_TYPE
4799 && TYPE_NONALIASED_COMPONENT (type))
4801 target = copy_rtx (target);
4802 MEM_KEEP_ALIAS_SET_P (target) = 1;
4804 store_constructor_field (target, bitsize, bitpos, mode, value,
4805 type, cleared, get_alias_set (elttype));
4810 else if (TREE_CODE (type) == VECTOR_TYPE)
4816 tree elttype = TREE_TYPE (type);
4817 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
4818 enum machine_mode eltmode = TYPE_MODE (elttype);
4819 HOST_WIDE_INT bitsize;
4820 HOST_WIDE_INT bitpos;
4824 if (eltmode == BLKmode)
4827 n_elts = TYPE_VECTOR_SUBPARTS (type);
4828 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
4830 enum machine_mode mode = GET_MODE (target);
4832 icode = (int) vec_init_optab->handlers[mode].insn_code;
4833 if (icode != CODE_FOR_nothing)
4837 vector = alloca (n_elts * sizeof (rtx));
4838 for (i = 0; i < n_elts; i++)
4839 vector [i] = CONST0_RTX (GET_MODE_INNER (mode));
4843 /* If the constructor has fewer elements than the vector,
4844 clear the whole vector first. Similarly if this is a
4845 static constructor of a non-BLKmode object. */
4848 else if (REG_P (target) && TREE_STATIC (exp))
4852 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
4854 for (elt = CONSTRUCTOR_ELTS (exp);
4856 elt = TREE_CHAIN (elt))
4860 int_const_binop (TRUNC_DIV_EXPR,
4861 TYPE_SIZE (TREE_TYPE (TREE_VALUE (elt))),
4862 TYPE_SIZE (elttype), 0), 1);
4864 count += n_elts_here;
4865 if (mostly_zeros_p (TREE_VALUE (elt)))
4866 zero_count += n_elts_here;
4869 /* Clear the entire vector first if there are any missing elements,
4870 or if the incidence of zero elements is >= 75%. */
4871 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
4874 if (need_to_clear && size > 0 && !vector)
4877 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4879 clear_storage (target, GEN_INT (size));
4883 if (!cleared && REG_P (target))
4884 /* Inform later passes that the old value is dead. */
4885 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4887 /* Store each element of the constructor into the corresponding
4888 element of TARGET, determined by counting the elements. */
4889 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4891 elt = TREE_CHAIN (elt), i += bitsize / elt_size)
4893 tree value = TREE_VALUE (elt);
4894 tree index = TREE_PURPOSE (elt);
4895 HOST_WIDE_INT eltpos;
4897 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
4898 if (cleared && initializer_zerop (value))
4902 eltpos = tree_low_cst (index, 1);
4908 /* Vector CONSTRUCTORs should only be built from smaller
4909 vectors in the case of BLKmode vectors. */
4910 if (TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE)
4912 vector[eltpos] = expand_expr (value, NULL_RTX, VOIDmode, 0);
4916 enum machine_mode value_mode =
4917 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
4918 ? TYPE_MODE (TREE_TYPE (value))
4920 bitpos = eltpos * elt_size;
4921 store_constructor_field (target, bitsize, bitpos, value_mode, value,
4922 type, cleared, get_alias_set (elttype));
4927 emit_insn (GEN_FCN (icode) (target,
4928 gen_rtx_PARALLEL (GET_MODE (target),
4929 gen_rtvec_v (n_elts, vector))));
4932 /* Set constructor assignments. */
4933 else if (TREE_CODE (type) == SET_TYPE)
4935 tree elt = CONSTRUCTOR_ELTS (exp);
4936 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4937 tree domain = TYPE_DOMAIN (type);
4938 tree domain_min, domain_max, bitlength;
4940 /* The default implementation strategy is to extract the constant
4941 parts of the constructor, use that to initialize the target,
4942 and then "or" in whatever non-constant ranges we need in addition.
4944 If a large set is all zero or all ones, it is
4945 probably better to set it using memset.
4946 Also, if a large set has just a single range, it may also be
4947 better to first clear the whole set (using
4948 memset), and then set the bits we want. */
4950 /* Check for all zeros. */
4951 if (elt == NULL_TREE && size > 0)
4954 clear_storage (target, GEN_INT (size));
4958 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4959 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4960 bitlength = size_binop (PLUS_EXPR,
4961 size_diffop (domain_max, domain_min),
4964 nbits = tree_low_cst (bitlength, 1);
4966 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4967 are "complicated" (more than one range), initialize (the
4968 constant parts) by copying from a constant. */
4969 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4970 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4972 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4973 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4974 char *bit_buffer = alloca (nbits);
4975 HOST_WIDE_INT word = 0;
4976 unsigned int bit_pos = 0;
4977 unsigned int ibit = 0;
4978 unsigned int offset = 0; /* In bytes from beginning of set. */
4980 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4983 if (bit_buffer[ibit])
4985 if (BYTES_BIG_ENDIAN)
4986 word |= (1 << (set_word_size - 1 - bit_pos));
4988 word |= 1 << bit_pos;
4992 if (bit_pos >= set_word_size || ibit == nbits)
4994 if (word != 0 || ! cleared)
4996 rtx datum = gen_int_mode (word, mode);
4999 /* The assumption here is that it is safe to use
5000 XEXP if the set is multi-word, but not if
5001 it's single-word. */
5003 to_rtx = adjust_address (target, mode, offset);
5004 else if (offset == 0)
5008 emit_move_insn (to_rtx, datum);
5015 offset += set_word_size / BITS_PER_UNIT;
5020 /* Don't bother clearing storage if the set is all ones. */
5021 if (TREE_CHAIN (elt) != NULL_TREE
5022 || (TREE_PURPOSE (elt) == NULL_TREE
5024 : ( ! host_integerp (TREE_VALUE (elt), 0)
5025 || ! host_integerp (TREE_PURPOSE (elt), 0)
5026 || (tree_low_cst (TREE_VALUE (elt), 0)
5027 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5028 != (HOST_WIDE_INT) nbits))))
5029 clear_storage (target, expr_size (exp));
5031 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5033 /* Start of range of element or NULL. */
5034 tree startbit = TREE_PURPOSE (elt);
5035 /* End of range of element, or element value. */
5036 tree endbit = TREE_VALUE (elt);
5037 HOST_WIDE_INT startb, endb;
5038 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5040 bitlength_rtx = expand_expr (bitlength,
5041 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5043 /* Handle non-range tuple element like [ expr ]. */
5044 if (startbit == NULL_TREE)
5046 startbit = save_expr (endbit);
5050 startbit = convert (sizetype, startbit);
5051 endbit = convert (sizetype, endbit);
5052 if (! integer_zerop (domain_min))
5054 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5055 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5057 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5058 EXPAND_CONST_ADDRESS);
5059 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5060 EXPAND_CONST_ADDRESS);
5066 ((build_qualified_type (lang_hooks.types.type_for_mode
5067 (GET_MODE (target), 0),
5070 emit_move_insn (targetx, target);
5073 else if (MEM_P (target))
5078 /* Optimization: If startbit and endbit are constants divisible
5079 by BITS_PER_UNIT, call memset instead. */
5080 if (TREE_CODE (startbit) == INTEGER_CST
5081 && TREE_CODE (endbit) == INTEGER_CST
5082 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5083 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5085 emit_library_call (memset_libfunc, LCT_NORMAL,
5087 plus_constant (XEXP (targetx, 0),
5088 startb / BITS_PER_UNIT),
5090 constm1_rtx, TYPE_MODE (integer_type_node),
5091 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5092 TYPE_MODE (sizetype));
5095 emit_library_call (setbits_libfunc, LCT_NORMAL,
5096 VOIDmode, 4, XEXP (targetx, 0),
5097 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5098 startbit_rtx, TYPE_MODE (sizetype),
5099 endbit_rtx, TYPE_MODE (sizetype));
5102 emit_move_insn (target, targetx);
5110 /* Store the value of EXP (an expression tree)
5111 into a subfield of TARGET which has mode MODE and occupies
5112 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5113 If MODE is VOIDmode, it means that we are storing into a bit-field.
5115 If VALUE_MODE is VOIDmode, return nothing in particular.
5116 UNSIGNEDP is not used in this case.
5118 Otherwise, return an rtx for the value stored. This rtx
5119 has mode VALUE_MODE if that is convenient to do.
5120 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5122 TYPE is the type of the underlying object.
5124 ALIAS_SET is the alias set for the destination. This value will
5125 (in general) be different from that for TARGET, since TARGET is a
5126 reference to the containing structure. */
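/* Illustrative example (hypothetical declaration): a store into the
   3-bit field f of "struct { unsigned f : 3; } x" reaches here with
   BITSIZE == 3, BITPOS at the field's bit offset, and MODE == VOIDmode,
   since no machine mode addresses the field directly; such stores go
   through store_bit_field below.  */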
5129 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5130 enum machine_mode mode, tree exp, enum machine_mode value_mode,
5131 int unsignedp, tree type, int alias_set)
5133 HOST_WIDE_INT width_mask = 0;
5135 if (TREE_CODE (exp) == ERROR_MARK)
5138 /* If we have nothing to store, do nothing unless the expression has side effects. */
5141 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5142 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5143 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5145 /* If we are storing into an unaligned field of an aligned union that is
5146 in a register, we may have the mode of TARGET being an integer mode but
5147 MODE == BLKmode. In that case, get an aligned object whose size and
5148 alignment are the same as TARGET and store TARGET into it (we can avoid
5149 the store if the field being stored is the entire width of TARGET). Then
5150 call ourselves recursively to store the field into a BLKmode version of
5151 that object. Finally, load from the object into TARGET. This is not
5152 very efficient in general, but should only be slightly more expensive
5153 than the otherwise-required unaligned accesses. Perhaps this can be
5154 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5155 twice, once with emit_move_insn and once via store_field. */
5158 && (REG_P (target) || GET_CODE (target) == SUBREG))
5160 rtx object = assign_temp (type, 0, 1, 1);
5161 rtx blk_object = adjust_address (object, BLKmode, 0);
5163 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5164 emit_move_insn (object, target);
5166 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5169 emit_move_insn (target, object);
5171 /* We want to return the BLKmode version of the data. */
5175 if (GET_CODE (target) == CONCAT)
5177 /* We're storing into a struct containing a single __complex. */
5181 return store_expr (exp, target, value_mode != VOIDmode);
5184 /* If the structure is in a register or if the component
5185 is a bit field, we cannot use addressing to access it.
5186 Use bit-field techniques or SUBREG to store in it. */
5188 if (mode == VOIDmode
5189 || (mode != BLKmode && ! direct_store[(int) mode]
5190 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5191 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5193 || GET_CODE (target) == SUBREG
5194 /* If the field isn't aligned enough to store as an ordinary memref,
5195 store it as a bit field. */
5197 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5198 || bitpos % GET_MODE_ALIGNMENT (mode))
5199 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5200 || (bitpos % BITS_PER_UNIT != 0)))
5201 /* If the RHS and field are a constant size and the size of the
5202 RHS isn't the same size as the bitfield, we must use bitfield operations. */
5205 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5206 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5208 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5210 /* If BITSIZE is narrower than the size of the type of EXP
5211 we will be narrowing TEMP. Normally, what's wanted are the
5212 low-order bits. However, if EXP's type is a record and this is
5213 a big-endian machine, we want the upper BITSIZE bits. */
5214 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5215 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5216 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5217 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5218 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5222 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE. */
5224 if (mode != VOIDmode && mode != BLKmode
5225 && mode != TYPE_MODE (TREE_TYPE (exp)))
5226 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5228 /* If the modes of TARGET and TEMP are both BLKmode, both
5229 must be in memory and BITPOS must be aligned on a byte
5230 boundary. If so, we simply do a block copy. */
5231 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5233 if (!MEM_P (target) || !MEM_P (temp)
5234 || bitpos % BITS_PER_UNIT != 0)
5237 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5238 emit_block_move (target, temp,
5239 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5243 return value_mode == VOIDmode ? const0_rtx : target;
5246 /* Store the value in the bitfield. */
5247 store_bit_field (target, bitsize, bitpos, mode, temp);
5249 if (value_mode != VOIDmode)
5251 /* The caller wants an rtx for the value.
5252 If possible, avoid refetching from the bitfield itself. */
5254 && ! (MEM_P (target) && MEM_VOLATILE_P (target)))
5257 enum machine_mode tmode;
5259 tmode = GET_MODE (temp);
5260 if (tmode == VOIDmode)
5264 return expand_and (tmode, temp,
5265 gen_int_mode (width_mask, tmode),
5268 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5269 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5270 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5273 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5274 NULL_RTX, value_mode, VOIDmode);
5280 rtx addr = XEXP (target, 0);
5281 rtx to_rtx = target;
5283 /* If a value is wanted, it must be the lhs;
5284 so make the address stable for multiple use. */
5286 if (value_mode != VOIDmode && !REG_P (addr)
5287 && ! CONSTANT_ADDRESS_P (addr)
5288 /* A frame-pointer reference is already stable. */
5289 && ! (GET_CODE (addr) == PLUS
5290 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5291 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5292 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5293 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5295 /* Now build a reference to just the desired component. */
5297 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5299 if (to_rtx == target)
5300 to_rtx = copy_rtx (to_rtx);
5302 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5303 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5304 set_mem_alias_set (to_rtx, alias_set);
5306 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5310 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5311 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5312 codes and find the ultimate containing object, which we return.
5314 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5315 bit position, and *PUNSIGNEDP to the signedness of the field.
5316 If the position of the field is variable, we store a tree
5317 giving the variable offset (in units) in *POFFSET.
5318 This offset is in addition to the bit position.
5319 If the position is not variable, we store 0 in *POFFSET.
5321 If any of the extraction expressions is volatile,
5322 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5324 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5325 is a mode that can be used to access the field. In that case, *PBITSIZE is redundant.
5328 If the field describes a variable-sized object, *PMODE is set to
5329 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5330 this case, but the address of the object can be found. */
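/* For illustration, assuming 32-bit int and the hypothetical
   declaration "struct { int x; int y; } s": for the reference s.y this
   returns s with *PBITSIZE == 32, *PBITPOS == 32 and *POFFSET == 0.
   For a[i].y with variable i, the variable byte offset of a[i] comes
   back as a tree in *POFFSET, while *PBITPOS holds only the constant
   displacement of y.  */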
5333 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5334 HOST_WIDE_INT *pbitpos, tree *poffset,
5335 enum machine_mode *pmode, int *punsignedp,
5339 enum machine_mode mode = VOIDmode;
5340 tree offset = size_zero_node;
5341 tree bit_offset = bitsize_zero_node;
5344 /* First get the mode, signedness, and size. We do this from just the
5345 outermost expression. */
5346 if (TREE_CODE (exp) == COMPONENT_REF)
5348 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5349 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5350 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5352 *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
5354 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5356 size_tree = TREE_OPERAND (exp, 1);
5357 *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
5361 mode = TYPE_MODE (TREE_TYPE (exp));
5362 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5364 if (mode == BLKmode)
5365 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5367 *pbitsize = GET_MODE_BITSIZE (mode);
5372 if (! host_integerp (size_tree, 1))
5373 mode = BLKmode, *pbitsize = -1;
5375 *pbitsize = tree_low_cst (size_tree, 1);
5378 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5379 and find the ultimate containing object. */
5382 if (TREE_CODE (exp) == BIT_FIELD_REF)
5383 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5384 else if (TREE_CODE (exp) == COMPONENT_REF)
5386 tree field = TREE_OPERAND (exp, 1);
5387 tree this_offset = component_ref_field_offset (exp);
5389 /* If this field hasn't been filled in yet, don't go
5390 past it. This should only happen when folding expressions
5391 made during type construction. */
5392 if (this_offset == 0)
5395 offset = size_binop (PLUS_EXPR, offset, this_offset);
5396 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5397 DECL_FIELD_BIT_OFFSET (field));
5399 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5402 else if (TREE_CODE (exp) == ARRAY_REF
5403 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5405 tree index = TREE_OPERAND (exp, 1);
5406 tree low_bound = array_ref_low_bound (exp);
5407 tree unit_size = array_ref_element_size (exp);
5409 /* We assume all arrays have sizes that are a multiple of a byte.
5410 First subtract the lower bound, if any, in the type of the
5411 index, then convert to sizetype and multiply by the size of the element. */
5413 if (! integer_zerop (low_bound))
5414 index = fold (build2 (MINUS_EXPR, TREE_TYPE (index),
5417 offset = size_binop (PLUS_EXPR, offset,
5418 size_binop (MULT_EXPR,
5419 convert (sizetype, index),
5423 /* We can go inside most conversions: all NON_LVALUE_EXPRs, all normal
5424 conversions that don't change the mode, and all view conversions
5425 except those that need to "step up" the alignment. */
5426 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5427 && ! (TREE_CODE (exp) == VIEW_CONVERT_EXPR
5428 && ! ((TYPE_ALIGN (TREE_TYPE (exp))
5429 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5431 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5432 < BIGGEST_ALIGNMENT)
5433 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5434 || TYPE_ALIGN_OK (TREE_TYPE
5435 (TREE_OPERAND (exp, 0))))))
5436 && ! ((TREE_CODE (exp) == NOP_EXPR
5437 || TREE_CODE (exp) == CONVERT_EXPR)
5438 && (TYPE_MODE (TREE_TYPE (exp))
5439 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5442 /* If any reference in the chain is volatile, the effect is volatile. */
5443 if (TREE_THIS_VOLATILE (exp))
5446 exp = TREE_OPERAND (exp, 0);
5449 /* If OFFSET is constant, see if we can return the whole thing as a
5450 constant bit position. Otherwise, split it up. */
5451 if (host_integerp (offset, 0)
5452 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5454 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5455 && host_integerp (tem, 0))
5456 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5458 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5464 /* Return a tree of sizetype representing the size, in bytes, of the element
5465 of EXP, an ARRAY_REF. */
5468 array_ref_element_size (tree exp)
5470 tree aligned_size = TREE_OPERAND (exp, 3);
5471 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5473 /* If a size was specified in the ARRAY_REF, it's the size measured
5474 in alignment units of the element type. So multiply by that value. */
5476 return size_binop (MULT_EXPR, aligned_size,
5477 size_int (TYPE_ALIGN (elmt_type) / BITS_PER_UNIT));
5479 /* Otherwise, take the size from that of the element type. Substitute
5480 any PLACEHOLDER_EXPR that we have. */
5482 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
5485 /* Return a tree representing the lower bound of the array mentioned in
5486 EXP, an ARRAY_REF. */
5489 array_ref_low_bound (tree exp)
5491 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5493 /* If a lower bound is specified in EXP, use it. */
5494 if (TREE_OPERAND (exp, 2))
5495 return TREE_OPERAND (exp, 2);
5497 /* Otherwise, if there is a domain type and it has a lower bound, use it,
5498 substituting for a PLACEHOLDER_EXPR as needed. */
5499 if (domain_type && TYPE_MIN_VALUE (domain_type))
5500 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
5502 /* Otherwise, return a zero of the appropriate type. */
5503 return fold_convert (TREE_TYPE (TREE_OPERAND (exp, 1)), integer_zero_node);
5506 /* Return a tree representing the upper bound of the array mentioned in
5507 EXP, an ARRAY_REF. */
5510 array_ref_up_bound (tree exp)
5512 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5514 /* If there is a domain type and it has an upper bound, use it, substituting
5515 for a PLACEHOLDER_EXPR as needed. */
5516 if (domain_type && TYPE_MAX_VALUE (domain_type))
5517 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
5519 /* Otherwise fail. */
5523 /* Return a tree representing the offset, in bytes, of the field referenced
5524 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
5527 component_ref_field_offset (tree exp)
5529 tree aligned_offset = TREE_OPERAND (exp, 2);
5530 tree field = TREE_OPERAND (exp, 1);
5532 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
5533 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that value. */
5536 return size_binop (MULT_EXPR, aligned_offset,
5537 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
5539 /* Otherwise, take the offset from that of the field. Substitute
5540 any PLACEHOLDER_EXPR that we have. */
5542 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
5545 /* Return 1 if T is an expression that get_inner_reference handles. */
5548 handled_component_p (tree t)
5550 switch (TREE_CODE (t))
5555 case ARRAY_RANGE_REF:
5556 case NON_LVALUE_EXPR:
5557 case VIEW_CONVERT_EXPR:
5560 /* ??? Sure they are handled, but get_inner_reference may return
5561 a different PBITSIZE, depending upon whether the expression is
5562 wrapped up in a NOP_EXPR or not, e.g. for bitfields. */
5565 return (TYPE_MODE (TREE_TYPE (t))
5566 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5573 /* Given an rtx VALUE that may contain additions and multiplications, return
5574 an equivalent value that just refers to a register, memory, or constant.
5575 This is done by generating instructions to perform the arithmetic and
5576 returning a pseudo-register containing the value.
5578 The returned value may be a REG, SUBREG, MEM or constant. */
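/* Illustrative sketch: given VALUE == (plus:SI (reg:SI 60) (const_int 4))
   and TARGET == 0, the addition is expanded (ultimately through
   expand_simple_binop) and a pseudo register holding the sum is
   returned; a VALUE that is already a REG, MEM or constant comes back
   unchanged.  */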
5581 force_operand (rtx value, rtx target)
5584 /* Use subtarget as the target for operand 0 of a binary operation. */
5585 rtx subtarget = get_subtarget (target);
5586 enum rtx_code code = GET_CODE (value);
5588 /* Check for a subreg applied to an expression produced by the loop optimizer. */
5590 && !REG_P (SUBREG_REG (value))
5591 && !MEM_P (SUBREG_REG (value)))
5593 value = simplify_gen_subreg (GET_MODE (value),
5594 force_reg (GET_MODE (SUBREG_REG (value)),
5595 force_operand (SUBREG_REG (value),
5597 GET_MODE (SUBREG_REG (value)),
5598 SUBREG_BYTE (value));
5599 code = GET_CODE (value);
5602 /* Check for a PIC address load. */
5603 if ((code == PLUS || code == MINUS)
5604 && XEXP (value, 0) == pic_offset_table_rtx
5605 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5606 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5607 || GET_CODE (XEXP (value, 1)) == CONST))
5610 subtarget = gen_reg_rtx (GET_MODE (value));
5611 emit_move_insn (subtarget, value);
5615 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5618 target = gen_reg_rtx (GET_MODE (value));
5619 convert_move (target, force_operand (XEXP (value, 0), NULL),
5620 code == ZERO_EXTEND);
5624 if (ARITHMETIC_P (value))
5626 op2 = XEXP (value, 1);
5627 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
5629 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5632 op2 = negate_rtx (GET_MODE (value), op2);
5635 /* Check for an addition with OP2 a constant integer and our first
5636 operand a PLUS of a virtual register and something else. In that
5637 case, we want to emit the sum of the virtual register and the
5638 constant first and then add the other value. This allows virtual
5639 register instantiation to simply modify the constant rather than
5640 creating another one around this addition. */
5641 if (code == PLUS && GET_CODE (op2) == CONST_INT
5642 && GET_CODE (XEXP (value, 0)) == PLUS
5643 && REG_P (XEXP (XEXP (value, 0), 0))
5644 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5645 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5647 rtx temp = expand_simple_binop (GET_MODE (value), code,
5648 XEXP (XEXP (value, 0), 0), op2,
5649 subtarget, 0, OPTAB_LIB_WIDEN);
5650 return expand_simple_binop (GET_MODE (value), code, temp,
5651 force_operand (XEXP (XEXP (value,
5653 target, 0, OPTAB_LIB_WIDEN);
5656 op1 = force_operand (XEXP (value, 0), subtarget);
5657 op2 = force_operand (op2, NULL_RTX);
5661 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5663 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5664 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5665 target, 1, OPTAB_LIB_WIDEN);
5667 return expand_divmod (0,
5668 FLOAT_MODE_P (GET_MODE (value))
5669 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5670 GET_MODE (value), op1, op2, target, 0);
5673 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5677 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5681 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5685 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5686 target, 0, OPTAB_LIB_WIDEN);
5689 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5690 target, 1, OPTAB_LIB_WIDEN);
5693 if (UNARY_P (value))
5695 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5696 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5699 #ifdef INSN_SCHEDULING
5700 /* On machines that have insn scheduling, we want all memory references to be
5701 explicit, so we need to deal with such paradoxical SUBREGs. */
5702 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
5703 && (GET_MODE_SIZE (GET_MODE (value))
5704 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5706 = simplify_gen_subreg (GET_MODE (value),
5707 force_reg (GET_MODE (SUBREG_REG (value)),
5708 force_operand (SUBREG_REG (value),
5710 GET_MODE (SUBREG_REG (value)),
5711 SUBREG_BYTE (value));
5717 /* Subroutine of expand_expr: return nonzero iff there is no way that
5718 EXP can reference X, which is being modified. TOP_P is nonzero if this
5719 call is going to be used to determine whether we need a temporary
5720 for EXP, as opposed to a recursive call to this function.
5722 It is always safe for this routine to return zero since it merely
5723 searches for optimization opportunities. */
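/* Illustrative intent: with X == (reg:SI 70), an EXP that never
   mentions the variable living in that register yields 1 (no possible
   conflict), while an EXP that is exactly the VAR_DECL whose DECL_RTL
   is (reg:SI 70) yields 0.  Answering 0 is always conservative; it
   only costs a temporary.  */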
5726 safe_from_p (rtx x, tree exp, int top_p)
5732 /* If EXP has varying size, we MUST use a target since we currently
5733 have no way of allocating temporaries of variable size
5734 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5735 So we assume here that something at a higher level has prevented a
5736 clash. This is somewhat bogus, but the best we can do. Only
5737 do this when X is BLKmode and when we are at the top level. */
5738 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5739 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5740 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5741 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5742 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5744 && GET_MODE (x) == BLKmode)
5745 /* If X is in the outgoing argument area, it is always safe. */
5747 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5748 || (GET_CODE (XEXP (x, 0)) == PLUS
5749 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5752 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5753 find the underlying pseudo. */
5754 if (GET_CODE (x) == SUBREG)
5757 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
5761 /* Now look at our tree code and possibly recurse. */
5762 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5765 exp_rtl = DECL_RTL_IF_SET (exp);
5772 if (TREE_CODE (exp) == TREE_LIST)
5776 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
5778 exp = TREE_CHAIN (exp);
5781 if (TREE_CODE (exp) != TREE_LIST)
5782 return safe_from_p (x, exp, 0);
5785 else if (TREE_CODE (exp) == ERROR_MARK)
5786 return 1; /* An already-visited SAVE_EXPR? */
5791 /* The only case we look at here is the DECL_INITIAL inside a DECL_EXPR. */
5793 return (TREE_CODE (exp) != DECL_EXPR
5794 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
5795 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
5796 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
5800 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
5805 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5809 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5810 the expression. If it is set, we conflict iff we are that rtx or
5811 both are in memory. Otherwise, we check all operands of the
5812 expression recursively. */
5814 switch (TREE_CODE (exp))
5817 /* If the operand is static or we are static, we can't conflict.
5818 Likewise if we don't conflict with the operand at all. */
5819 if (staticp (TREE_OPERAND (exp, 0))
5820 || TREE_STATIC (exp)
5821 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5824 /* Otherwise, the only way this can conflict is if we are taking
5825 the address of a DECL whose address is part of X, which is very rare. */
5827 exp = TREE_OPERAND (exp, 0);
5830 if (!DECL_RTL_SET_P (exp)
5831 || !MEM_P (DECL_RTL (exp)))
5834 exp_rtl = XEXP (DECL_RTL (exp), 0);
5840 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5841 get_alias_set (exp)))
5846 /* Assume that the call will clobber all hard registers and all of memory. */
5848 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
5853 case WITH_CLEANUP_EXPR:
5854 case CLEANUP_POINT_EXPR:
5855 /* Lowered by gimplify.c. */
5859 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5865 /* If we have an rtx, we do not need to scan our operands. */
5869 nops = first_rtl_op (TREE_CODE (exp));
5870 for (i = 0; i < nops; i++)
5871 if (TREE_OPERAND (exp, i) != 0
5872 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5875 /* If this is a language-specific tree code, it may require
5876 special handling. */
5877 if ((unsigned int) TREE_CODE (exp)
5878 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5879 && !lang_hooks.safe_from_p (x, exp))
5883 /* If we have an rtl, find any enclosed object. Then see if we conflict with it. */
5887 if (GET_CODE (exp_rtl) == SUBREG)
5889 exp_rtl = SUBREG_REG (exp_rtl);
5891 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5895 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5896 are memory and they conflict. */
5897 return ! (rtx_equal_p (x, exp_rtl)
5898 || (MEM_P (x) && MEM_P (exp_rtl)
5899 && true_dependence (exp_rtl, VOIDmode, x,
5900 rtx_addr_varies_p)));
5903 /* If we reach here, it is safe. */
5907 /* Subroutine of expand_expr: return rtx if EXP is a
5908 variable or parameter; else return 0. */
5914 switch (TREE_CODE (exp))
5918 return DECL_RTL (exp);
5924 /* Return the highest power of two that EXP is known to be a multiple of.
5925 This is used in updating alignment of MEMs in array references. */
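/* Worked example: for the tree (i * 12) the result is 4, since the
   unknown factor i contributes 1 (the default) and 12 contributes its
   lowest set bit; for (i * 8 + 20) the PLUS_EXPR case yields
   MIN (8, 4) == 4.  */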
5927 static unsigned HOST_WIDE_INT
5928 highest_pow2_factor (tree exp)
5930 unsigned HOST_WIDE_INT c0, c1;
5932 switch (TREE_CODE (exp))
5935 /* We can find the lowest bit that's a one. If the low
5936 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
5937 We need to handle this case since we can find it in a COND_EXPR,
5938 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
5939 erroneous program, so return BIGGEST_ALIGNMENT to avoid any later ICE. */
5941 if (TREE_CONSTANT_OVERFLOW (exp))
5942 return BIGGEST_ALIGNMENT;
5945 /* Note: tree_low_cst is intentionally not used here,
5946 we don't care about the upper bits. */
5947 c0 = TREE_INT_CST_LOW (exp);
5949 return c0 ? c0 : BIGGEST_ALIGNMENT;
5953 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
5954 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5955 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5956 return MIN (c0, c1);
5959 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5960 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5963 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
5965 if (integer_pow2p (TREE_OPERAND (exp, 1))
5966 && host_integerp (TREE_OPERAND (exp, 1), 1))
5968 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5969 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
5970 return MAX (1, c0 / c1);
5974 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
5976 return highest_pow2_factor (TREE_OPERAND (exp, 0));
5979 return highest_pow2_factor (TREE_OPERAND (exp, 1));
5982 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5983 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
5984 return MIN (c0, c1);
5993 /* Similar, except that the alignment requirements of TARGET are
5994 taken into account. Assume it is at least as aligned as its
5995 type, unless it is a COMPONENT_REF in which case the layout of
5996 the structure gives the alignment. */
5998 static unsigned HOST_WIDE_INT
5999 highest_pow2_factor_for_target (tree target, tree exp)
6001 unsigned HOST_WIDE_INT target_align, factor;
6003 factor = highest_pow2_factor (exp);
6004 if (TREE_CODE (target) == COMPONENT_REF)
6005 target_align = DECL_ALIGN (TREE_OPERAND (target, 1)) / BITS_PER_UNIT;
6007 target_align = TYPE_ALIGN (TREE_TYPE (target)) / BITS_PER_UNIT;
6008 return MAX (factor, target_align);
6011 /* Expands variable VAR. */
6014 expand_var (tree var)
6016 if (DECL_EXTERNAL (var))
6019 if (TREE_STATIC (var))
6020 /* If this is an inlined copy of a static local variable,
6021 look up the original decl. */
6022 var = DECL_ORIGIN (var);
6024 if (TREE_STATIC (var)
6025 ? !TREE_ASM_WRITTEN (var)
6026 : !DECL_RTL_SET_P (var))
6028 if (TREE_CODE (var) == VAR_DECL && DECL_DEFER_OUTPUT (var))
6030 /* Prepare a mem & address for the decl. */
6033 if (TREE_STATIC (var))
6036 x = gen_rtx_MEM (DECL_MODE (var),
6037 gen_reg_rtx (Pmode));
6039 set_mem_attributes (x, var, 1);
6040 SET_DECL_RTL (var, x);
6042 else if (lang_hooks.expand_decl (var))
6044 else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
6046 else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
6047 rest_of_decl_compilation (var, NULL, 0, 0);
6048 else if (TREE_CODE (var) == TYPE_DECL
6049 || TREE_CODE (var) == CONST_DECL
6050 || TREE_CODE (var) == FUNCTION_DECL
6051 || TREE_CODE (var) == LABEL_DECL)
6052 /* No expansion needed. */;
6058 /* Subroutine of expand_expr. Expand the two operands of a binary
6059 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6060 The value may be stored in TARGET if TARGET is nonzero. The
6061 MODIFIER argument is as documented by expand_expr. */
6064 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6065 enum expand_modifier modifier)
6067 if (! safe_from_p (target, exp1, 1))
6069 if (operand_equal_p (exp0, exp1, 0))
6071 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6072 *op1 = copy_rtx (*op0);
6076 /* If we need to preserve evaluation order, copy exp0 into its own
6077 temporary variable so that it can't be clobbered by exp1. */
6078 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6079 exp0 = save_expr (exp0);
6080 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6081 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6086 /* expand_expr: generate code for computing expression EXP.
6087 An rtx for the computed value is returned. The value is never null.
6088 In the case of a void EXP, const0_rtx is returned.
6090 The value may be stored in TARGET if TARGET is nonzero.
6091 TARGET is just a suggestion; callers must assume that
6092 the rtx returned may not be the same as TARGET.
6094 If TARGET is CONST0_RTX, it means that the value will be ignored.
6096 If TMODE is not VOIDmode, it suggests generating the
6097 result in mode TMODE. But this is done only when convenient.
6098 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6099 TMODE is just a suggestion; callers must assume that
6100 the rtx returned may not have mode TMODE.
6102 Note that TARGET may have neither TMODE nor MODE. In that case, it
6103 probably will not be used.
6105 If MODIFIER is EXPAND_SUM then when EXP is an addition
6106 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6107 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6108 products as above, or REG or MEM, or constant.
6109 Ordinarily in such cases we would output mul or add instructions
6110 and then return a pseudo reg containing the sum.
6112 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6113 it also marks a label as absolutely required (it can't be dead).
6114 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6115 This is used for outputting expressions used in initializers.
6117 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6118 with a constant address even if that address is not normally legitimate.
6119 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6121 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6122 a call parameter. Such targets require special care as we haven't yet
6123 marked TARGET so that it's safe from being trashed by libcalls. We
6124 don't want to use TARGET for anything but the final result;
6125 Intermediate values must go elsewhere. Additionally, calls to
6126 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6128 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6129 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6130 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
6131 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on recursively. */
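/* A usage sketch (hypothetical caller): expanding an expression for its
   value would be

       rtx v = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);

   whereas address arithmetic inside a static initializer would pass
   EXPAND_INITIALIZER and may legitimately get back a (plus ...) nest
   rather than a pseudo holding the sum, as described above.  */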
6134 static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
6135 enum expand_modifier, rtx *);
6138 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6139 enum expand_modifier modifier, rtx *alt_rtl)
6142 rtx ret, last = NULL;
6144 /* Handle ERROR_MARK before anybody tries to access its type. */
6145 if (TREE_CODE (exp) == ERROR_MARK
6146 || TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK)
6148 ret = CONST0_RTX (tmode);
6149 return ret ? ret : const0_rtx;
6152 if (flag_non_call_exceptions)
6154 rn = lookup_stmt_eh_region (exp);
6155 /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw. */
6157 last = get_last_insn ();
6160 /* If this is an expression of some kind and it has an associated line
6161 number, then emit the line number before expanding the expression.
6163 We need to save and restore the file and line information so that
6164 errors discovered during expansion are emitted with the right
6165 information. It would be better if the diagnostic routines
6166 used the file/line information embedded in the tree nodes rather than globals. */
6168 if (cfun && EXPR_HAS_LOCATION (exp))
6170 location_t saved_location = input_location;
6171 input_location = EXPR_LOCATION (exp);
6172 emit_line_note (input_location);
6174 /* Record where the insns produced belong. */
6175 record_block_change (TREE_BLOCK (exp));
6177 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6179 input_location = saved_location;
6182 else
6183 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6186 /* If using non-call exceptions, mark all insns that may trap.
6187 expand_call() will mark CALL_INSNs before we get to this code,
6188 but it doesn't handle libcalls, and these may trap. */
6189 if (rn >= 0)
6190 {
6191 rtx insn;
6192 for (insn = next_real_insn (last); insn;
6193 insn = next_real_insn (insn))
6195 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
6196 /* If we want exceptions for non-call insns, any
6197 may_trap_p instruction may throw. */
6198 && GET_CODE (PATTERN (insn)) != CLOBBER
6199 && GET_CODE (PATTERN (insn)) != USE
6200 && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
6202 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
6203 REG_NOTES (insn));
6206 return ret;
6207 }
6211 static rtx
6212 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
6213 enum expand_modifier modifier, rtx *alt_rtl)
6215 rtx op0, op1, temp;
6216 tree type = TREE_TYPE (exp);
6217 int unsignedp;
6218 enum machine_mode mode;
6219 enum tree_code code = TREE_CODE (exp);
6220 optab this_optab;
6221 rtx subtarget, original_target;
6222 int ignore;
6223 tree context;
6224 bool reduce_bit_field = false;
6225 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field && !ignore \
6226 ? reduce_to_bit_field_precision ((expr), \
6227 target, \
6228 type) \
6229 : (expr))
6231 mode = TYPE_MODE (type);
6232 unsignedp = TYPE_UNSIGNED (type);
6233 if (lang_hooks.reduce_bit_field_operations
6234 && TREE_CODE (type) == INTEGER_TYPE
6235 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type))
6237 /* An operation in what may be a bit-field type needs the
6238 result to be reduced to the precision of the bit-field type,
6239 which is narrower than that of the type's mode. */
6240 reduce_bit_field = true;
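/* For instance, for an unsigned 3-bit bit-field type computed in
   SImode, 7 + 1 yields 8 in the wider mode; reducing to the 3-bit
   precision masks the result back to 0, the wrap-around value the
   narrow type requires.  (Signed fields are sign-extended from the
   field's top bit instead.)  */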
6241 if (modifier == EXPAND_STACK_PARM)
6242 target = 0;
6245 /* Use subtarget as the target for operand 0 of a binary operation. */
6246 subtarget = get_subtarget (target);
6247 original_target = target;
6248 ignore = (target == const0_rtx
6249 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6250 || code == CONVERT_EXPR || code == COND_EXPR
6251 || code == VIEW_CONVERT_EXPR)
6252 && TREE_CODE (type) == VOID_TYPE));
6254 /* If we are going to ignore this result, we need only do something
6255 if there is a side-effect somewhere in the expression. If there
6256 is, short-circuit the most common cases here. Note that we must
6257 not call expand_expr with anything but const0_rtx in case this
6258 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6262 if (! TREE_SIDE_EFFECTS (exp))
6263 return const0_rtx;
6265 /* Ensure we reference a volatile object even if value is ignored, but
6266 don't do this if all we are doing is taking its address. */
6267 if (TREE_THIS_VOLATILE (exp)
6268 && TREE_CODE (exp) != FUNCTION_DECL
6269 && mode != VOIDmode && mode != BLKmode
6270 && modifier != EXPAND_CONST_ADDRESS)
6272 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6273 if (MEM_P (temp))
6274 temp = copy_to_reg (temp);
6275 return const0_rtx;
6278 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6279 || code == INDIRECT_REF)
6280 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6281 modifier);
6283 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6284 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6286 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6287 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6288 return const0_rtx;
6290 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6291 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6292 /* If the second operand has no side effects, just evaluate
6293 the first.  */
6294 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6295 modifier);
6296 else if (code == BIT_FIELD_REF)
6298 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6299 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6300 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6301 return const0_rtx;
6307 /* If we will do cse, generate all results into pseudo registers
6308 since 1) that allows cse to find more things
6309 and 2) otherwise cse could produce an insn the machine
6310 cannot support. An exception is a CONSTRUCTOR into a multi-word
6311 MEM: storing directly into the MEM is much more likely to be efficient.
6312 Another is a CALL_EXPR which must return in memory. */
6314 if (! cse_not_expected && mode != BLKmode && target
6315 && (!REG_P (target) || REGNO (target) < FIRST_PSEUDO_REGISTER)
6316 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
6317 && ! (code == CALL_EXPR && aggregate_value_p (exp, exp)))
6318 target = 0;
6320 switch (code)
6321 {
6322 case LABEL_DECL:
6323 {
6324 tree function = decl_function_context (exp);
6326 temp = label_rtx (exp);
6327 temp = gen_rtx_LABEL_REF (Pmode, temp);
6329 if (function != current_function_decl
6330 && function != 0)
6331 LABEL_REF_NONLOCAL_P (temp) = 1;
6333 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
6334 return temp;
6335 }
6337 case PARM_DECL:
6338 case VAR_DECL:
6339 /* If a static var's type was incomplete when the decl was written,
6340 but the type is complete now, lay out the decl now. */
6341 if (DECL_SIZE (exp) == 0
6342 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6343 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6344 layout_decl (exp, 0);
6346 /* ... fall through ... */
6348 case FUNCTION_DECL:
6349 case RESULT_DECL:
6350 if (DECL_RTL (exp) == 0)
6351 abort ();
6353 /* Ensure variable marked as used even if it doesn't go through
6354 a parser.  If it hasn't been used yet, write out an external
6355 definition.  */
6356 if (! TREE_USED (exp))
6358 assemble_external (exp);
6359 TREE_USED (exp) = 1;
6362 /* Show we haven't gotten RTL for this yet.  */
6363 temp = 0;
6365 /* Variables inherited from containing functions should have
6366 been lowered by this point. */
6367 context = decl_function_context (exp);
6368 if (context != 0
6369 && context != current_function_decl
6370 && !TREE_STATIC (exp)
6371 /* ??? C++ creates functions that are not TREE_STATIC. */
6372 && TREE_CODE (exp) != FUNCTION_DECL)
6373 abort ();
6375 /* This is the case of an array whose size is to be determined
6376 from its initializer, while the initializer is still being parsed.
6377 See expand_decl.  */
6379 else if (MEM_P (DECL_RTL (exp))
6380 && REG_P (XEXP (DECL_RTL (exp), 0)))
6381 temp = validize_mem (DECL_RTL (exp));
6383 /* If DECL_RTL is memory, we are in the normal case and either
6384 the address is not valid or it is not a register and -fforce-addr
6385 is specified, get the address into a register. */
6387 else if (MEM_P (DECL_RTL (exp))
6388 && modifier != EXPAND_CONST_ADDRESS
6389 && modifier != EXPAND_SUM
6390 && modifier != EXPAND_INITIALIZER
6391 && (! memory_address_p (DECL_MODE (exp),
6392 XEXP (DECL_RTL (exp), 0))
6394 && !REG_P (XEXP (DECL_RTL (exp), 0)))))
6397 *alt_rtl = DECL_RTL (exp);
6398 temp = replace_equiv_address (DECL_RTL (exp),
6399 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6402 /* If we got something, return it. But first, set the alignment
6403 if the address is a register. */
6404 if (temp != 0)
6405 {
6406 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
6407 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6409 return temp;
6410 }
6412 /* If the mode of DECL_RTL does not match that of the decl, it
6413 must be a promoted value. We return a SUBREG of the wanted mode,
6414 but mark it so that we know that it was already extended. */
6416 if (REG_P (DECL_RTL (exp))
6417 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6419 /* Get the signedness used for this variable. Ensure we get the
6420 same mode we got when the variable was declared. */
6421 if (GET_MODE (DECL_RTL (exp))
6422 != promote_mode (type, DECL_MODE (exp), &unsignedp,
6423 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6424 abort ();
6426 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6427 SUBREG_PROMOTED_VAR_P (temp) = 1;
6428 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6429 return temp;
6430 }
6432 return DECL_RTL (exp);
6434 case INTEGER_CST:
6435 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6436 TREE_INT_CST_HIGH (exp), mode);
6438 /* ??? If overflow is set, fold will have done an incomplete job,
6439 which can result in (plus xx (const_int 0)), which can get
6440 simplified by validate_replace_rtx during virtual register
6441 instantiation, which can result in unrecognizable insns.
6442 Avoid this by forcing all overflows into registers. */
6443 if (TREE_CONSTANT_OVERFLOW (exp)
6444 && modifier != EXPAND_INITIALIZER)
6445 temp = force_reg (mode, temp);
6446 return temp;
6448 case VECTOR_CST:
6450 if (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_INT
6451 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_FLOAT)
6452 return const_vector_from_tree (exp);
6454 return expand_expr (build1 (CONSTRUCTOR, TREE_TYPE (exp),
6455 TREE_VECTOR_CST_ELTS (exp)),
6456 ignore ? const0_rtx : target, tmode, modifier);
6458 case CONST_DECL:
6459 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6461 case REAL_CST:
6462 /* If optimized, generate immediate CONST_DOUBLE
6463 which will be turned into memory by reload if necessary.
6465 We used to force a register so that loop.c could see it. But
6466 this does not allow gen_* patterns to perform optimizations with
6467 the constants. It also produces two insns in cases like "x = 1.0;".
6468 On most machines, floating-point constants are not permitted in
6469 many insns, so we'd end up copying it to a register in any case.
6471 Now, we do the copying in expand_binop, if appropriate. */
6472 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6473 TYPE_MODE (TREE_TYPE (exp)));
6475 case COMPLEX_CST:
6476 /* Handle evaluating a complex constant in a CONCAT target.  */
6477 if (original_target && GET_CODE (original_target) == CONCAT)
6479 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6480 rtx rtarg, itarg;
6482 rtarg = XEXP (original_target, 0);
6483 itarg = XEXP (original_target, 1);
6485 /* Move the real and imaginary parts separately. */
6486 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
6487 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
6489 if (op0 != rtarg)
6490 emit_move_insn (rtarg, op0);
6491 if (op1 != itarg)
6492 emit_move_insn (itarg, op1);
6494 return original_target;
6497 /* ... fall through ... */
6499 case STRING_CST:
6500 temp = output_constant_def (exp, 1);
6502 /* temp contains a constant address.
6503 On RISC machines where a constant address isn't valid,
6504 make some insns to get that address into a register. */
6505 if (modifier != EXPAND_CONST_ADDRESS
6506 && modifier != EXPAND_INITIALIZER
6507 && modifier != EXPAND_SUM
6508 && (! memory_address_p (mode, XEXP (temp, 0))
6509 || flag_force_addr))
6510 return replace_equiv_address (temp,
6511 copy_rtx (XEXP (temp, 0)));
6512 return temp;
6514 case SAVE_EXPR:
6516 tree val = TREE_OPERAND (exp, 0);
6517 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
6519 if (TREE_CODE (val) != VAR_DECL || !DECL_ARTIFICIAL (val))
6521 /* We can indeed still hit this case, typically via builtin
6522 expanders calling save_expr immediately before expanding
6523 something. Assume this means that we only have to deal
6524 with non-BLKmode values. */
6525 if (GET_MODE (ret) == BLKmode)
6526 abort ();
6528 val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
6529 DECL_ARTIFICIAL (val) = 1;
6530 TREE_OPERAND (exp, 0) = val;
6532 if (!CONSTANT_P (ret))
6533 ret = copy_to_reg (ret);
6534 SET_DECL_RTL (val, ret);
6537 return ret;
6541 case UNSAVE_EXPR:
6543 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6544 TREE_OPERAND (exp, 0)
6545 = lang_hooks.unsave_expr_now (TREE_OPERAND (exp, 0));
6546 return temp;
6549 case GOTO_EXPR:
6550 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6551 expand_goto (TREE_OPERAND (exp, 0));
6552 else
6553 expand_computed_goto (TREE_OPERAND (exp, 0));
6554 return const0_rtx;
6556 /* These are lowered during gimplification, so we should never ever
6557 see them here.  */
6562 case LABELED_BLOCK_EXPR:
6563 if (LABELED_BLOCK_BODY (exp))
6564 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6565 /* Should perhaps use expand_label, but this is simpler and safer. */
6566 do_pending_stack_adjust ();
6567 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6568 return const0_rtx;
6570 case EXIT_BLOCK_EXPR:
6571 if (EXIT_BLOCK_RETURN (exp))
6572 sorry ("returned value in block_exit_expr");
6573 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6574 return const0_rtx;
6576 case CONSTRUCTOR:
6577 /* If we don't need the result, just ensure we evaluate any
6578 subexpressions.  */
6580 if (ignore)
6581 {
6582 tree elt;
6583 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6584 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6586 return const0_rtx;
6587 }
6589 /* All elts simple constants => refer to a constant in memory. But
6590 if this is a non-BLKmode mode, let it store a field at a time
6591 since that should make a CONST_INT or CONST_DOUBLE when we
6592 fold. Likewise, if we have a target we can use, it is best to
6593 store directly into the target unless the type is large enough
6594 that memcpy will be used. If we are making an initializer and
6595 all operands are constant, put it in memory as well.
6597 FIXME: Avoid trying to fill vector constructors piece-meal.
6598 Output them with output_constant_def below unless we're sure
6599 they're zeros. This should go away when vector initializers
6600 are treated like VECTOR_CST instead of arrays.
6602 else if ((TREE_STATIC (exp)
6603 && ((mode == BLKmode
6604 && ! (target != 0 && safe_from_p (target, exp, 1)))
6605 || TREE_ADDRESSABLE (exp)
6606 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6607 && (! MOVE_BY_PIECES_P
6608 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6610 && ! mostly_zeros_p (exp))))
6611 || ((modifier == EXPAND_INITIALIZER
6612 || modifier == EXPAND_CONST_ADDRESS)
6613 && TREE_CONSTANT (exp)))
6615 rtx constructor = output_constant_def (exp, 1);
6617 if (modifier != EXPAND_CONST_ADDRESS
6618 && modifier != EXPAND_INITIALIZER
6619 && modifier != EXPAND_SUM)
6620 constructor = validize_mem (constructor);
6622 return constructor;
6623 }
6624 else
6625 {
6626 /* Handle calls that pass values in multiple non-contiguous
6627 locations. The Irix 6 ABI has examples of this. */
6628 if (target == 0 || ! safe_from_p (target, exp, 1)
6629 || GET_CODE (target) == PARALLEL
6630 || modifier == EXPAND_STACK_PARM)
6631 target
6632 = assign_temp (build_qualified_type (type,
6633 (TYPE_QUALS (type)
6634 | (TREE_READONLY (exp)
6635 * TYPE_QUAL_CONST))),
6636 0, TREE_ADDRESSABLE (exp), 1);
6638 store_constructor (exp, target, 0, int_expr_size (exp));
6639 return target;
6640 }
6642 case INDIRECT_REF:
6643 {
6644 tree exp1 = TREE_OPERAND (exp, 0);
6646 if (modifier != EXPAND_WRITE)
6647 {
6648 tree t;
6650 t = fold_read_from_constant_string (exp);
6651 if (t)
6652 return expand_expr (t, target, tmode, modifier);
6653 }
6655 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6656 op0 = memory_address (mode, op0);
6657 temp = gen_rtx_MEM (mode, op0);
6658 set_mem_attributes (temp, exp, 0);
6660 /* If we are writing to this object and its type is a record with
6661 readonly fields, we must mark it as readonly so it will
6662 conflict with readonly references to those fields. */
6663 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
6664 RTX_UNCHANGING_P (temp) = 1;
6666 return temp;
6667 }
6669 case ARRAY_REF:
6671 #ifdef ENABLE_CHECKING
6672 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6673 abort ();
6674 #endif
6676 {
6677 tree array = TREE_OPERAND (exp, 0);
6678 tree low_bound = array_ref_low_bound (exp);
6679 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6682 /* Optimize the special-case of a zero lower bound.
6684 We convert the low_bound to sizetype to avoid some problems
6685 with constant folding. (E.g. suppose the lower bound is 1,
6686 and its mode is QI. Without the conversion, (ARRAY
6687 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6688 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6690 if (! integer_zerop (low_bound))
6691 index = size_diffop (index, convert (sizetype, low_bound));
6693 /* Fold an expression like: "foo"[2].
6694 This is not done in fold so it won't happen inside &.
6695 Don't fold if this is for wide characters since it's too
6696 difficult to do correctly and this is a very rare case. */
6698 if (modifier != EXPAND_CONST_ADDRESS
6699 && modifier != EXPAND_INITIALIZER
6700 && modifier != EXPAND_MEMORY)
6702 tree t = fold_read_from_constant_string (exp);
6704 if (t)
6705 return expand_expr (t, target, tmode, modifier);
6708 /* If this is a constant index into a constant array,
6709 just get the value from the array. Handle both the cases when
6710 we have an explicit constructor and when our operand is a variable
6711 that was declared const. */
6713 if (modifier != EXPAND_CONST_ADDRESS
6714 && modifier != EXPAND_INITIALIZER
6715 && modifier != EXPAND_MEMORY
6716 && TREE_CODE (array) == CONSTRUCTOR
6717 && ! TREE_SIDE_EFFECTS (array)
6718 && TREE_CODE (index) == INTEGER_CST
6719 && 0 > compare_tree_int (index,
6720 list_length (CONSTRUCTOR_ELTS
6721 (TREE_OPERAND (exp, 0)))))
6725 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6726 i = TREE_INT_CST_LOW (index);
6727 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6728 ;
6730 if (elem)
6731 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
6732 modifier);
6735 else if (optimize >= 1
6736 && modifier != EXPAND_CONST_ADDRESS
6737 && modifier != EXPAND_INITIALIZER
6738 && modifier != EXPAND_MEMORY
6739 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6740 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6741 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
6742 && targetm.binds_local_p (array))
6744 if (TREE_CODE (index) == INTEGER_CST)
6746 tree init = DECL_INITIAL (array);
6748 if (TREE_CODE (init) == CONSTRUCTOR)
6752 for (elem = CONSTRUCTOR_ELTS (init);
6753 (elem
6754 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6755 elem = TREE_CHAIN (elem))
6756 ;
6758 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6759 return expand_expr (fold (TREE_VALUE (elem)), target,
6760 tmode, modifier);
6762 else if (TREE_CODE (init) == STRING_CST
6763 && 0 > compare_tree_int (index,
6764 TREE_STRING_LENGTH (init)))
6766 tree type = TREE_TYPE (TREE_TYPE (init));
6767 enum machine_mode mode = TYPE_MODE (type);
6769 if (GET_MODE_CLASS (mode) == MODE_INT
6770 && GET_MODE_SIZE (mode) == 1)
6771 return gen_int_mode (TREE_STRING_POINTER (init)
6772 [TREE_INT_CST_LOW (index)], mode);
6777 goto normal_inner_ref;
6779 case COMPONENT_REF:
6780 /* If the operand is a CONSTRUCTOR, we can just extract the
6781 appropriate field if it is present. */
6782 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
6783 {
6784 tree elt;
6786 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6787 elt = TREE_CHAIN (elt))
6788 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6789 /* We can normally use the value of the field in the
6790 CONSTRUCTOR. However, if this is a bitfield in
6791 an integral mode that we can fit in a HOST_WIDE_INT,
6792 we must mask only the number of bits in the bitfield,
6793 since this is done implicitly by the constructor. If
6794 the bitfield does not meet either of those conditions,
6795 we can't do this optimization. */
6796 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6797 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6798 == MODE_INT)
6799 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6800 <= HOST_BITS_PER_WIDE_INT))))
6802 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
6803 && modifier == EXPAND_STACK_PARM)
6804 target = 0;
6805 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6806 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6808 HOST_WIDE_INT bitsize
6809 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6810 enum machine_mode imode
6811 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6813 if (TYPE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6815 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6816 op0 = expand_and (imode, op0, op1, target);
6817 }
6818 else
6819 {
6820 tree count
6821 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6822 0);
6824 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6825 target, 0);
6826 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6827 target, 0);
6828 }
6830 return op0;
6831 }
6834 goto normal_inner_ref;
6837 case ARRAY_RANGE_REF:
6838 normal_inner_ref:
6839 {
6840 enum machine_mode mode1;
6841 HOST_WIDE_INT bitsize, bitpos;
6842 tree offset;
6843 int volatilep = 0;
6844 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6845 &mode1, &unsignedp, &volatilep);
6846 rtx orig_op0;
6848 /* If we got back the original object, something is wrong. Perhaps
6849 we are evaluating an expression too early. In any event, don't
6850 infinitely recurse.  */
6851 if (tem == exp)
6852 abort ();
6854 /* If TEM's type is a union of variable size, pass TARGET to the inner
6855 computation, since it will need a temporary and TARGET is known
6856 to suffice.  This occurs in unchecked conversion in Ada.  */
6858 orig_op0 = op0
6859 = expand_expr (tem,
6860 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6861 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6862 != INTEGER_CST)
6863 && modifier != EXPAND_STACK_PARM
6864 ? target : NULL_RTX),
6865 VOIDmode,
6866 (modifier == EXPAND_INITIALIZER
6867 || modifier == EXPAND_CONST_ADDRESS
6868 || modifier == EXPAND_STACK_PARM)
6869 ? modifier : EXPAND_NORMAL);
6871 /* If this is a constant, put it into a register if it is a
6872 legitimate constant and OFFSET is 0 and memory if it isn't. */
6873 if (CONSTANT_P (op0))
6875 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6876 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6877 && offset == 0)
6878 op0 = force_reg (mode, op0);
6879 else
6880 op0 = validize_mem (force_const_mem (mode, op0));
6883 /* Otherwise, if this object is not in memory and we either have an
6884 offset or a BLKmode result, put it there. This case can't occur in
6885 C, but can in Ada if we have unchecked conversion of an expression
6886 from a scalar type to an array or record type or for an
6887 ARRAY_RANGE_REF whose type is BLKmode. */
6888 else if (!MEM_P (op0)
6889 && (offset != 0
6890 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
6892 tree nt = build_qualified_type (TREE_TYPE (tem),
6893 (TYPE_QUALS (TREE_TYPE (tem))
6894 | TYPE_QUAL_CONST));
6895 rtx memloc = assign_temp (nt, 1, 1, 1);
6897 emit_move_insn (memloc, op0);
6898 op0 = memloc;
6899 }
6901 if (offset != 0)
6902 {
6903 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
6904 EXPAND_SUM);
6906 if (!MEM_P (op0))
6907 abort ();
6909 #ifdef POINTERS_EXTEND_UNSIGNED
6910 if (GET_MODE (offset_rtx) != Pmode)
6911 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
6912 #else
6913 if (GET_MODE (offset_rtx) != ptr_mode)
6914 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6915 #endif
6917 if (GET_MODE (op0) == BLKmode
6918 /* A constant address in OP0 can have VOIDmode, we must
6919 not try to call force_reg in that case. */
6920 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6922 && (bitpos % bitsize) == 0
6923 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6924 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
6926 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
6927 bitpos = 0;
6928 }
6929 else
6930 op0 = offset_address (op0, offset_rtx,
6931 highest_pow2_factor (offset));
6934 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
6935 record its alignment as BIGGEST_ALIGNMENT. */
6936 if (MEM_P (op0) && bitpos == 0 && offset != 0
6937 && is_aligning_offset (offset, tem))
6938 set_mem_align (op0, BIGGEST_ALIGNMENT);
6940 /* Don't forget about volatility even if this is a bitfield. */
6941 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
6943 if (op0 == orig_op0)
6944 op0 = copy_rtx (op0);
6946 MEM_VOLATILE_P (op0) = 1;
6949 /* The following code doesn't handle CONCAT.
6950 Assume only bitpos == 0 can be used for CONCAT, due to
6951 one-element arrays having the same mode as their element.  */
6952 if (GET_CODE (op0) == CONCAT)
6954 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
6955 abort ();
6956 return op0;
6957 }
6959 /* In cases where an aligned union has an unaligned object
6960 as a field, we might be extracting a BLKmode value from
6961 an integer-mode (e.g., SImode) object. Handle this case
6962 by doing the extract into an object as wide as the field
6963 (which we know to be the width of a basic mode), then
6964 storing into memory, and changing the mode to BLKmode. */
6965 if (mode1 == VOIDmode
6966 || REG_P (op0) || GET_CODE (op0) == SUBREG
6967 || (mode1 != BLKmode && ! direct_load[(int) mode1]
6968 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6969 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
6970 && modifier != EXPAND_CONST_ADDRESS
6971 && modifier != EXPAND_INITIALIZER)
6972 /* If the field isn't aligned enough to fetch as a memref,
6973 fetch it as a bit field. */
6974 || (mode1 != BLKmode
6975 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
6976 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
6977 || (MEM_P (op0)
6978 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
6979 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
6980 && ((modifier == EXPAND_CONST_ADDRESS
6981 || modifier == EXPAND_INITIALIZER)
6982 ? STRICT_ALIGNMENT
6983 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
6984 || (bitpos % BITS_PER_UNIT != 0)))
6985 /* If the type and the field are a constant size and the
6986 size of the type isn't the same size as the bitfield,
6987 we must use bitfield operations. */
6988 || (bitsize >= 0
6989 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
6990 == INTEGER_CST)
6991 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
6992 bitsize)))
6994 enum machine_mode ext_mode = mode;
6996 if (ext_mode == BLKmode
6997 && ! (target != 0 && MEM_P (op0)
6998 && MEM_P (target)
6999 && bitpos % BITS_PER_UNIT == 0))
7000 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7002 if (ext_mode == BLKmode)
7003 {
7004 if (target == 0)
7005 target = assign_temp (type, 0, 1, 1);
7010 /* In this case, BITPOS must start at a byte boundary and
7011 TARGET, if specified, must be a MEM. */
7012 if (!MEM_P (op0)
7013 || (target != 0 && !MEM_P (target))
7014 || bitpos % BITS_PER_UNIT != 0)
7015 abort ();
7017 emit_block_move (target,
7018 adjust_address (op0, VOIDmode,
7019 bitpos / BITS_PER_UNIT),
7020 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7021 / BITS_PER_UNIT),
7022 (modifier == EXPAND_STACK_PARM
7023 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7025 return target;
7026 }
7028 op0 = validize_mem (op0);
7030 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
7031 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7033 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7034 (modifier == EXPAND_STACK_PARM
7035 ? NULL_RTX : target),
7036 ext_mode, ext_mode);
7038 /* If the result is a record type and BITSIZE is narrower than
7039 the mode of OP0, an integral mode, and this is a big endian
7040 machine, we must put the field into the high-order bits. */
7041 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7042 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7043 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7044 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7045 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7046 - bitsize),
7047 op0, 1);
7049 /* If the result type is BLKmode, store the data into a temporary
7050 of the appropriate type, but with the mode corresponding to the
7051 mode for the data we have (op0's mode). It's tempting to make
7052 this a constant type, since we know it's only being stored once,
7053 but that can cause problems if we are taking the address of this
7054 COMPONENT_REF because the MEM of any reference via that address
7055 will have flags corresponding to the type, which will not
7056 necessarily be constant. */
7057 if (mode == BLKmode)
7058 {
7059 rtx new
7060 = assign_stack_temp_for_type
7061 (ext_mode, GET_MODE_BITSIZE (ext_mode), 0, type);
7063 emit_move_insn (new, op0);
7064 op0 = copy_rtx (new);
7065 PUT_MODE (op0, BLKmode);
7066 set_mem_attributes (op0, exp, 1);
7067 }
7069 return op0;
7070 }
7072 /* If the result is BLKmode, use that to access the object
7073 now as well.  */
7074 if (mode == BLKmode)
7075 mode1 = BLKmode;
7077 /* Get a reference to just this component. */
7078 if (modifier == EXPAND_CONST_ADDRESS
7079 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7080 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7082 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7084 if (op0 == orig_op0)
7085 op0 = copy_rtx (op0);
7087 set_mem_attributes (op0, exp, 0);
7088 if (REG_P (XEXP (op0, 0)))
7089 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7091 MEM_VOLATILE_P (op0) |= volatilep;
7092 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7093 || modifier == EXPAND_CONST_ADDRESS
7094 || modifier == EXPAND_INITIALIZER)
7095 return op0;
7096 else if (target == 0)
7097 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7099 convert_move (target, op0, unsignedp);
7100 return target;
7101 }
7103 case OBJ_TYPE_REF:
7104 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
7106 case CALL_EXPR:
7107 /* Check for a built-in function.  */
7108 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7109 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7111 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7113 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7114 == BUILT_IN_FRONTEND)
7115 return lang_hooks.expand_expr (exp, original_target,
7116 tmode, modifier,
7117 alt_rtl);
7118 else
7119 return expand_builtin (exp, target, subtarget, tmode, ignore);
7122 return expand_call (exp, target, ignore);
7124 case NON_LVALUE_EXPR:
7125 case NOP_EXPR:
7126 case CONVERT_EXPR:
7127 if (TREE_OPERAND (exp, 0) == error_mark_node)
7128 return const0_rtx;
7130 if (TREE_CODE (type) == UNION_TYPE)
7132 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7134 /* If both input and output are BLKmode, this conversion isn't doing
7135 anything except possibly changing memory attribute. */
7136 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7138 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7139 modifier);
7141 result = copy_rtx (result);
7142 set_mem_attributes (result, exp, 0);
7143 return result;
7144 }
7146 if (target == 0)
7147 {
7148 if (TYPE_MODE (type) != BLKmode)
7149 target = gen_reg_rtx (TYPE_MODE (type));
7150 else
7151 target = assign_temp (type, 0, 1, 1);
7152 }
7154 if (MEM_P (target))
7155 /* Store data into beginning of memory target. */
7156 store_expr (TREE_OPERAND (exp, 0),
7157 adjust_address (target, TYPE_MODE (valtype), 0),
7158 modifier == EXPAND_STACK_PARM ? 2 : 0);
7160 else if (REG_P (target))
7161 /* Store this field into a union of the proper type. */
7162 store_field (target,
7163 MIN ((int_size_in_bytes (TREE_TYPE
7164 (TREE_OPERAND (exp, 0)))
7166 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7167 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7168 VOIDmode, 0, type, 0);
7172 /* Return the entire union.  */
7173 return target;
7174 }
7176 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7178 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7181 /* If the signedness of the conversion differs and OP0 is
7182 a promoted SUBREG, clear that indication since we now
7183 have to do the proper extension. */
7184 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7185 && GET_CODE (op0) == SUBREG)
7186 SUBREG_PROMOTED_VAR_P (op0) = 0;
7188 return REDUCE_BIT_FIELD (op0);
7191 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7192 op0 = REDUCE_BIT_FIELD (op0);
7193 if (GET_MODE (op0) == mode)
7194 return op0;
7196 /* If OP0 is a constant, just convert it into the proper mode. */
7197 if (CONSTANT_P (op0))
7199 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7200 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7202 if (modifier == EXPAND_INITIALIZER)
7203 return simplify_gen_subreg (mode, op0, inner_mode,
7204 subreg_lowpart_offset (mode,
7205 inner_mode));
7206 else
7207 return convert_modes (mode, inner_mode, op0,
7208 TYPE_UNSIGNED (inner_type));
7211 if (modifier == EXPAND_INITIALIZER)
7212 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7214 if (target == 0)
7215 return
7216 convert_to_mode (mode, op0,
7217 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7218 else
7219 convert_move (target, op0,
7220 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7221 return target;
7223 case VIEW_CONVERT_EXPR:
7224 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7226 /* If the input and output modes are both the same, we are done.
7227 Otherwise, if neither mode is BLKmode and both are integral and within
7228 a word, we can use gen_lowpart. If neither is true, make sure the
7229 operand is in memory and convert the MEM to the new mode. */
7230 if (TYPE_MODE (type) == GET_MODE (op0))
7232 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7233 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7234 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
7235 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7236 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7237 op0 = gen_lowpart (TYPE_MODE (type), op0);
7238 else if (!MEM_P (op0))
7240 /* If the operand is not a MEM, force it into memory. Since we
7241 are going to be changing the mode of the MEM, don't call
7242 force_const_mem for constants because we don't allow pool
7243 constants to change mode. */
7244 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7246 if (TREE_ADDRESSABLE (exp))
7247 abort ();
7249 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7250 target
7251 = assign_stack_temp_for_type
7252 (TYPE_MODE (inner_type),
7253 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7255 emit_move_insn (target, op0);
7256 op0 = target;
7257 }
7259 /* At this point, OP0 is in the correct mode. If the output type is such
7260 that the operand is known to be aligned, indicate that it is.
7261 Otherwise, we need only be concerned about alignment for non-BLKmode
7262 results.  */
7263 if (MEM_P (op0))
7264 {
7265 op0 = copy_rtx (op0);
7267 if (TYPE_ALIGN_OK (type))
7268 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7269 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7270 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7272 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7273 HOST_WIDE_INT temp_size
7274 = MAX (int_size_in_bytes (inner_type),
7275 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7276 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7277 temp_size, 0, type);
7278 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7280 if (TREE_ADDRESSABLE (exp))
7281 abort ();
7283 if (GET_MODE (op0) == BLKmode)
7284 emit_block_move (new_with_op0_mode, op0,
7285 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7286 (modifier == EXPAND_STACK_PARM
7287 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7288 else
7289 emit_move_insn (new_with_op0_mode, op0);
7291 op0 = new;
7292 }
7294 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7295 }
7297 return op0;
7299 case PLUS_EXPR:
7300 this_optab = ! unsignedp && flag_trapv
7301 && (GET_MODE_CLASS (mode) == MODE_INT)
7302 ? addv_optab : add_optab;
7304 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
7305 something else, make sure we add the register to the constant and
7306 then to the other thing. This case can occur during strength
7307 reduction and doing it this way will produce better code if the
7308 frame pointer or argument pointer is eliminated.
7310 fold-const.c will ensure that the constant is always in the inner
7311 PLUS_EXPR, so the only case we need to do anything about is if
7312 sp, ap, or fp is our second argument, in which case we must swap
7313 the innermost first argument and our second argument. */
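/* For example, given (x + 4) + fp, the swap rewrites the tree so we
   expand (fp + 4) + x, letting fp + 4 fold to a constant address term
   once the frame pointer is eliminated.  */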
7315 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7316 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7317 && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
7318 && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7319 || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7320 || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7322 tree t = TREE_OPERAND (exp, 1);
7324 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7325 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7328 /* If the result is to be ptr_mode and we are adding an integer to
7329 something, we might be forming a constant. So try to use
7330 plus_constant. If it produces a sum and we can't accept it,
7331 use force_operand. This allows P = &ARR[const] to generate
7332 efficient code on machines where a SYMBOL_REF is not a valid
7335 If this is an EXPAND_SUM call, always return the sum. */
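/* For instance, &arr[3] with 4-byte elements can come back from
   plus_constant as (const (plus (symbol_ref arr) (const_int 12)))
   instead of costing an add insn.  */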
7336 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7337 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7339 if (modifier == EXPAND_STACK_PARM)
7340 target = 0;
7341 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7342 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7343 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7344 {
7345 rtx constant_part;
7347 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7348 EXPAND_SUM);
7349 /* Use immed_double_const to ensure that the constant is
7350 truncated according to the mode of OP1, then sign extended
7351 to a HOST_WIDE_INT. Using the constant directly can result
7352 in non-canonical RTL in a 64x32 cross compile. */
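/* E.g., with a 64-bit HOST_WIDE_INT the SImode constant 0xffffffff
   must be represented as (const_int -1); CONST_INTs are always
   sign-extended, and immed_double_const produces that canonical
   form.  */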
7353 constant_part
7354 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7355 (HOST_WIDE_INT) 0,
7356 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7357 op1 = plus_constant (op1, INTVAL (constant_part));
7358 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7359 op1 = force_operand (op1, target);
7360 return REDUCE_BIT_FIELD (op1);
7363 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7364 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7365 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7366 {
7367 rtx constant_part;
7369 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7370 (modifier == EXPAND_INITIALIZER
7371 ? EXPAND_INITIALIZER : EXPAND_SUM));
7372 if (! CONSTANT_P (op0))
7374 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7375 VOIDmode, modifier);
7376 /* Return a PLUS if modifier says it's OK. */
7377 if (modifier == EXPAND_SUM
7378 || modifier == EXPAND_INITIALIZER)
7379 return simplify_gen_binary (PLUS, mode, op0, op1);
7382 /* Use immed_double_const to ensure that the constant is
7383 truncated according to the mode of OP1, then sign extended
7384 to a HOST_WIDE_INT. Using the constant directly can result
7385 in non-canonical RTL in a 64x32 cross compile. */
7386 constant_part
7387 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7388 (HOST_WIDE_INT) 0,
7389 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7390 op0 = plus_constant (op0, INTVAL (constant_part));
7391 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7392 op0 = force_operand (op0, target);
7393 return REDUCE_BIT_FIELD (op0);
7397 /* No sense saving up arithmetic to be done
7398 if it's all in the wrong mode to form part of an address.
7399 And force_operand won't know whether to sign-extend or
7400 zero-extend.  */
7401 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7402 || mode != ptr_mode)
7404 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7405 subtarget, &op0, &op1, 0);
7406 if (op0 == const0_rtx)
7407 return op1;
7408 if (op1 == const0_rtx)
7409 return op0;
7410 goto binop2;
7411 }
7413 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7414 subtarget, &op0, &op1, modifier);
7415 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7418 /* For initializers, we are allowed to return a MINUS of two
7419 symbolic constants.  Here we handle all cases when both operands
7420 are constant.  */
7421 /* Handle difference of two symbolic constants,
7422 for the sake of an initializer. */
7423 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7424 && really_constant_p (TREE_OPERAND (exp, 0))
7425 && really_constant_p (TREE_OPERAND (exp, 1)))
7427 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7428 NULL_RTX, &op0, &op1, modifier);
7430 /* If the last operand is a CONST_INT, use plus_constant of
7431 the negated constant. Else make the MINUS. */
7432 if (GET_CODE (op1) == CONST_INT)
7433 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
7435 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
7438 this_optab = ! unsignedp && flag_trapv
7439 && (GET_MODE_CLASS(mode) == MODE_INT)
7440 ? subv_optab : sub_optab;
7442 /* No sense saving up arithmetic to be done
7443 if it's all in the wrong mode to form part of an address.
7444 And force_operand won't know whether to sign-extend or
7445 zero-extend.  */
7446 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7447 || mode != ptr_mode)
7448 goto binop;
7450 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7451 subtarget, &op0, &op1, modifier);
7453 /* Convert A - const to A + (-const). */
7454 if (GET_CODE (op1) == CONST_INT)
7456 op1 = negate_rtx (mode, op1);
7457 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7458 }
7460 goto binop2;
7462 case MULT_EXPR:
7463 /* If first operand is constant, swap them.
7464 Thus the following special case checks need only
7465 check the second operand. */
7466 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7468 tree t1 = TREE_OPERAND (exp, 0);
7469 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7470 TREE_OPERAND (exp, 1) = t1;
7473 /* Attempt to return something suitable for generating an
7474 indexed address, for machines that support that. */
7476 if (modifier == EXPAND_SUM && mode == ptr_mode
7477 && host_integerp (TREE_OPERAND (exp, 1), 0))
7479 tree exp1 = TREE_OPERAND (exp, 1);
7481 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7482 EXPAND_SUM);
7484 if (!REG_P (op0))
7485 op0 = force_operand (op0, NULL_RTX);
7486 if (!REG_P (op0))
7487 op0 = copy_to_mode_reg (mode, op0);
7489 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
7490 gen_int_mode (tree_low_cst (exp1, 0),
7491 TYPE_MODE (TREE_TYPE (exp1)))));
7494 if (modifier == EXPAND_STACK_PARM)
7495 target = 0;
7497 /* Check for multiplying things that have been extended
7498 from a narrower type. If this machine supports multiplying
7499 in that narrower type with a result in the desired type,
7500 do it that way, and avoid the explicit type-conversion. */
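/* For example, on a target providing a mulhisi3 pattern, a product of
   two HImode values extended to SImode can be emitted as a single
   widening multiply rather than two extensions and an SImode mult.  */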
7501 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7502 && TREE_CODE (type) == INTEGER_TYPE
7503 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7504 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7505 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7506 && int_fits_type_p (TREE_OPERAND (exp, 1),
7507 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7508 /* Don't use a widening multiply if a shift will do. */
7509 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7510 > HOST_BITS_PER_WIDE_INT)
7511 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7512 ||
7513 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7514 && (TYPE_PRECISION (TREE_TYPE
7515 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7516 == TYPE_PRECISION (TREE_TYPE
7517 (TREE_OPERAND
7518 (TREE_OPERAND (exp, 0), 0))))
7519 /* If both operands are extended, they must either both
7520 be zero-extended or both be sign-extended. */
7521 && (TYPE_UNSIGNED (TREE_TYPE
7522 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7523 == TYPE_UNSIGNED (TREE_TYPE
7524 (TREE_OPERAND
7525 (TREE_OPERAND (exp, 0), 0)))))))
7527 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
7528 enum machine_mode innermode = TYPE_MODE (op0type);
7529 bool zextend_p = TYPE_UNSIGNED (op0type);
7530 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
7531 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
7533 if (mode == GET_MODE_WIDER_MODE (innermode))
7535 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7537 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7538 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7539 TREE_OPERAND (exp, 1),
7540 NULL_RTX, &op0, &op1, 0);
7541 else
7542 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7543 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7544 NULL_RTX, &op0, &op1, 0);
7545 goto binop2;
7546 }
7547 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7548 && innermode == word_mode)
7549 {
7550 rtx htem, hipart;
7551 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7552 NULL_RTX, VOIDmode, 0);
7553 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7554 op1 = convert_modes (innermode, mode,
7555 expand_expr (TREE_OPERAND (exp, 1),
7556 NULL_RTX, VOIDmode, 0),
7557 unsignedp);
7558 else
7559 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7560 NULL_RTX, VOIDmode, 0);
7561 temp = expand_binop (mode, other_optab, op0, op1, target,
7562 unsignedp, OPTAB_LIB_WIDEN);
7563 hipart = gen_highpart (innermode, temp);
7564 htem = expand_mult_highpart_adjust (innermode, hipart,
7565 op0, op1, hipart,
7566 zextend_p);
7567 if (htem != hipart)
7568 emit_move_insn (hipart, htem);
7569 return REDUCE_BIT_FIELD (temp);
7573 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7574 subtarget, &op0, &op1, 0);
7575 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
7577 case TRUNC_DIV_EXPR:
7578 case FLOOR_DIV_EXPR:
7579 case CEIL_DIV_EXPR:
7580 case ROUND_DIV_EXPR:
7581 case EXACT_DIV_EXPR:
7582 if (modifier == EXPAND_STACK_PARM)
7583 target = 0;
7584 /* Possible optimization: compute the dividend with EXPAND_SUM
7585 then, if the divisor is constant, we can optimize the case
7586 where some terms of the dividend have coefficients divisible by it.  */
7587 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7588 subtarget, &op0, &op1, 0);
7589 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7592 /* Emit a/b as a*(1/b).  Later we may manage to CSE the reciprocal,
7593 saving an expensive divide.  If not, combine will rebuild the original
7594 computation.  */
7595 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7596 && TREE_CODE (type) == REAL_TYPE
7597 && !real_onep (TREE_OPERAND (exp, 0)))
7598 return expand_expr (build2 (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7599 build2 (RDIV_EXPR, type,
7600 build_real (type, dconst1),
7601 TREE_OPERAND (exp, 1))),
7602 target, tmode, modifier);
7603 this_optab = sdiv_optab;
7604 goto binop;
7606 case TRUNC_MOD_EXPR:
7607 case FLOOR_MOD_EXPR:
7608 case CEIL_MOD_EXPR:
7609 case ROUND_MOD_EXPR:
7610 if (modifier == EXPAND_STACK_PARM)
7611 target = 0;
7612 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7613 subtarget, &op0, &op1, 0);
7614 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7616 case FIX_ROUND_EXPR:
7617 case FIX_FLOOR_EXPR:
7618 case FIX_CEIL_EXPR:
7619 abort (); /* Not used for C. */
7621 case FIX_TRUNC_EXPR:
7622 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7623 if (target == 0 || modifier == EXPAND_STACK_PARM)
7624 target = gen_reg_rtx (mode);
7625 expand_fix (target, op0, unsignedp);
7626 return target;
7628 case FLOAT_EXPR:
7629 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7630 if (target == 0 || modifier == EXPAND_STACK_PARM)
7631 target = gen_reg_rtx (mode);
7632 /* expand_float can't figure out what to do if FROM has VOIDmode.
7633 So give it the correct mode. With -O, cse will optimize this. */
7634 if (GET_MODE (op0) == VOIDmode)
7635 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7637 expand_float (target, op0,
7638 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7639 return target;
7641 case NEGATE_EXPR:
7642 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7643 if (modifier == EXPAND_STACK_PARM)
7644 target = 0;
7645 temp = expand_unop (mode,
7646 ! unsignedp && flag_trapv
7647 && (GET_MODE_CLASS(mode) == MODE_INT)
7648 ? negv_optab : neg_optab, op0, target, 0);
7649 if (temp == 0)
7650 abort ();
7651 return REDUCE_BIT_FIELD (temp);
7653 case ABS_EXPR:
7654 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7655 if (modifier == EXPAND_STACK_PARM)
7656 target = 0;
7658 /* ABS_EXPR is not valid for complex arguments. */
7659 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7660 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7661 abort ();
7663 /* Unsigned abs is simply the operand. Testing here means we don't
7664 risk generating incorrect code below. */
7665 if (TYPE_UNSIGNED (type))
7666 return op0;
7668 return expand_abs (mode, op0, target, unsignedp,
7669 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7671 case MIN_EXPR:
7672 case MAX_EXPR:
7673 target = original_target;
7674 if (target == 0
7675 || modifier == EXPAND_STACK_PARM
7676 || (MEM_P (target) && MEM_VOLATILE_P (target))
7677 || GET_MODE (target) != mode
7678 || (REG_P (target)
7679 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7680 target = gen_reg_rtx (mode);
7681 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7682 target, &op0, &op1, 0);
7684 /* First try to do it with a special MIN or MAX instruction.
7685 If that does not win, use a conditional jump to select the proper
7686 value.  */
7687 this_optab = (unsignedp
7688 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7689 : (code == MIN_EXPR ? smin_optab : smax_optab));
7691 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7692 OPTAB_WIDEN);
7693 if (temp != 0)
7694 return temp;
7696 /* At this point, a MEM target is no longer useful; we will get better
7697 code without it.  */
7699 if (! REG_P (target))
7700 target = gen_reg_rtx (mode);
7702 /* If op1 was placed in target, swap op0 and op1. */
7703 if (target != op0 && target == op1)
7704 {
7705 rtx tem = op0;
7706 op0 = op1;
7707 op1 = tem;
7708 }
7710 if (target != op0)
7711 emit_move_insn (target, op0);
7713 op0 = gen_label_rtx ();
7715 /* If this mode is an integer too wide to compare properly,
7716 compare word by word. Rely on cse to optimize constant cases. */
7717 if (GET_MODE_CLASS (mode) == MODE_INT
7718 && ! can_compare_p (GE, mode, ccp_jump))
7720 if (code == MAX_EXPR)
7721 do_jump_by_parts_greater_rtx (mode, unsignedp, target, op1,
7724 do_jump_by_parts_greater_rtx (mode, unsignedp, op1, target,
7729 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7730 unsignedp, mode, NULL_RTX, NULL_RTX, op0);
7732 emit_move_insn (target, op1);
7733 emit_label (op0);
7734 return target;
7736 case BIT_NOT_EXPR:
7737 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7738 if (modifier == EXPAND_STACK_PARM)
7740 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7741 if (temp == 0)
7742 abort ();
7743 return temp;
7745 /* ??? Can optimize bitwise operations with one arg constant.
7746 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7747 and (a bitwise1 b) bitwise2 b (etc)
7748 but that is probably not worthwhile.  */
7750 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7751 boolean values when we want in all cases to compute both of them. In
7752 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7753 as actual zero-or-1 values and then bitwise anding. In cases where
7754 there cannot be any side effects, better code would be made by
7755 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7756 how to recognize those cases. */
7758 case TRUTH_AND_EXPR:
7759 case BIT_AND_EXPR:
7760 this_optab = and_optab;
7761 goto binop;
7763 case TRUTH_OR_EXPR:
7764 case BIT_IOR_EXPR:
7765 this_optab = ior_optab;
7766 goto binop;
7768 case TRUTH_XOR_EXPR:
7769 case BIT_XOR_EXPR:
7770 this_optab = xor_optab;
7771 goto binop;
7773 case LSHIFT_EXPR:
7774 case RSHIFT_EXPR:
7775 case LROTATE_EXPR:
7776 case RROTATE_EXPR:
7777 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7778 subtarget = 0;
7779 if (modifier == EXPAND_STACK_PARM)
7780 target = 0;
7781 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7782 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7783 unsignedp);
7785 /* Could determine the answer when only additive constants differ. Also,
7786 the addition of one can be handled by changing the condition. */
7787 case LT_EXPR:
7788 case LE_EXPR:
7789 case GT_EXPR:
7790 case GE_EXPR:
7791 case EQ_EXPR:
7792 case NE_EXPR:
7793 case UNORDERED_EXPR:
7801 temp = do_store_flag (exp,
7802 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
7803 tmode != VOIDmode ? tmode : mode, 0);
7804 if (temp != 0)
7805 return temp;
7807 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7808 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7809 && original_target
7810 && REG_P (original_target)
7811 && (GET_MODE (original_target)
7812 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7814 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7815 VOIDmode, 0);
7817 /* If temp is constant, we can just compute the result. */
7818 if (GET_CODE (temp) == CONST_INT)
7820 if (INTVAL (temp) != 0)
7821 emit_move_insn (target, const1_rtx);
7822 else
7823 emit_move_insn (target, const0_rtx);
7825 return target;
7826 }
7828 if (temp != original_target)
7830 enum machine_mode mode1 = GET_MODE (temp);
7831 if (mode1 == VOIDmode)
7832 mode1 = tmode != VOIDmode ? tmode : mode;
7834 temp = copy_to_mode_reg (mode1, temp);
7837 op1 = gen_label_rtx ();
7838 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7839 GET_MODE (temp), unsignedp, op1);
7840 emit_move_insn (temp, const1_rtx);
7841 emit_label (op1);
7842 return temp;
7843 }
7845 /* If no set-flag instruction, must generate a conditional
7846 store into a temporary variable. Drop through
7847 and handle this like && and ||. */
7849 case TRUTH_ANDIF_EXPR:
7850 case TRUTH_ORIF_EXPR:
7851 if (! ignore
7852 && (target == 0
7853 || modifier == EXPAND_STACK_PARM
7854 || ! safe_from_p (target, exp, 1)
7855 /* Make sure we don't have a hard reg (such as function's return
7856 value) live across basic blocks, if not optimizing. */
7857 || (!optimize && REG_P (target)
7858 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7859 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7861 if (target)
7862 emit_clr_insn (target);
7864 op1 = gen_label_rtx ();
7865 jumpifnot (exp, op1);
7867 if (target)
7868 emit_0_to_1_insn (target);
7870 emit_label (op1);
7871 return ignore ? const0_rtx : target;
7873 case TRUTH_NOT_EXPR:
7874 if (modifier == EXPAND_STACK_PARM)
7875 target = 0;
7876 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7877 /* The parser is careful to generate TRUTH_NOT_EXPR
7878 only with operands that are always zero or one. */
7879 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7880 target, 1, OPTAB_LIB_WIDEN);
7881 if (temp == 0)
7882 abort ();
7883 return temp;
7885 case COMPOUND_EXPR:
7886 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7887 return expand_expr_real (TREE_OPERAND (exp, 1),
7888 (ignore ? const0_rtx : target),
7889 VOIDmode, modifier, alt_rtl);
7891 case STATEMENT_LIST:
7893 tree_stmt_iterator iter;
7898 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
7899 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
7900 }
7901 return const0_rtx;
7903 case COND_EXPR:
7904 /* If it's void, we don't need to worry about computing a value.  */
7905 if (VOID_TYPE_P (TREE_TYPE (exp)))
7907 tree pred = TREE_OPERAND (exp, 0);
7908 tree then_ = TREE_OPERAND (exp, 1);
7909 tree else_ = TREE_OPERAND (exp, 2);
7911 if (TREE_CODE (then_) == GOTO_EXPR
7912 && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
7914 jumpif (pred, label_rtx (GOTO_DESTINATION (then_)));
7915 return expand_expr (else_, const0_rtx, VOIDmode, 0);
7917 else if (TREE_CODE (else_) == GOTO_EXPR
7918 && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
7920 jumpifnot (pred, label_rtx (GOTO_DESTINATION (else_)));
7921 return expand_expr (then_, const0_rtx, VOIDmode, 0);
7924 /* Just use the 'if' machinery. */
7925 expand_start_cond (pred, 0);
7926 expand_expr (then_, const0_rtx, VOIDmode, 0);
7928 exp = else_;
7930 /* Iterate over 'else if's instead of recursing. */
7931 for (; TREE_CODE (exp) == COND_EXPR; exp = TREE_OPERAND (exp, 2))
7933 expand_start_else ();
7934 if (EXPR_HAS_LOCATION (exp))
7936 emit_line_note (EXPR_LOCATION (exp));
7937 record_block_change (TREE_BLOCK (exp));
7939 expand_elseif (TREE_OPERAND (exp, 0));
7940 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, 0);
7942 /* Don't emit the jump and label if there's no 'else' clause. */
7943 if (TREE_SIDE_EFFECTS (exp))
7945 expand_start_else ();
7946 expand_expr (exp, const0_rtx, VOIDmode, 0);
7947 }
7948 expand_end_cond ();
7949 return const0_rtx;
7950 }
7952 /* If we would have a "singleton" (see below) were it not for a
7953 conversion in each arm, bring that conversion back out. */
7954 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7955 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
7956 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
7957 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
7959 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
7960 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
7962 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
7963 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
7964 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
7965 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
7966 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
7967 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
7968 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
7969 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
7970 return expand_expr (build1 (NOP_EXPR, type,
7971 build3 (COND_EXPR, TREE_TYPE (iftrue),
7972 TREE_OPERAND (exp, 0),
7973 iftrue, iffalse)),
7974 target, tmode, modifier);
7978 /* Note that COND_EXPRs whose type is a structure or union
7979 are required to be constructed to contain assignments of
7980 a temporary variable, so that we can evaluate them here
7981 for side effect only. If type is void, we must do likewise. */
7983 /* If an arm of the branch requires a cleanup,
7984 only that cleanup is performed. */
7986 tree singleton = 0;
7987 tree binary_op = 0, unary_op = 0;
7989 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
7990 convert it to our mode, if necessary. */
7991 if (integer_onep (TREE_OPERAND (exp, 1))
7992 && integer_zerop (TREE_OPERAND (exp, 2))
7993 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7994 {
7995 if (ignore)
7996 {
7997 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
7998 modifier);
7999 return const0_rtx;
8000 }
8002 if (modifier == EXPAND_STACK_PARM)
8003 target = 0;
8004 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8005 if (GET_MODE (op0) == mode)
8006 return op0;
8008 if (target == 0)
8009 target = gen_reg_rtx (mode);
8010 convert_move (target, op0, unsignedp);
8011 return target;
8012 }
8014 /* Check for X ? A + B : A. If we have this, we can copy A to the
8015 output and conditionally add B. Similarly for unary operations.
8016 Don't do this if X has side-effects because those side effects
8017 might affect A or B and the "?" operation is a sequence point in
8018 ANSI. (operand_equal_p tests for side effects.) */
8020 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8021 && operand_equal_p (TREE_OPERAND (exp, 2),
8022 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8023 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8024 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8025 && operand_equal_p (TREE_OPERAND (exp, 1),
8026 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8027 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8028 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8029 && operand_equal_p (TREE_OPERAND (exp, 2),
8030 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8031 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8032 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8033 && operand_equal_p (TREE_OPERAND (exp, 1),
8034 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8035 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8037 /* If we are not to produce a result, we have no target. Otherwise,
8038 if a target was specified use it; it will not be used as an
8039 intermediate target unless it is safe.  If no target, use a
8040 temporary.  */
8041 if (ignore)
8042 temp = 0;
8044 else if (modifier == EXPAND_STACK_PARM)
8045 temp = assign_temp (type, 0, 0, 1);
8046 else if (original_target
8047 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8048 || (singleton && REG_P (original_target)
8049 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8050 && original_target == var_rtx (singleton)))
8051 && GET_MODE (original_target) == mode
8052 #ifdef HAVE_conditional_move
8053 && (! can_conditionally_move_p (mode)
8054 || REG_P (original_target)
8055 || TREE_ADDRESSABLE (type))
8056 #endif
8057 && (!MEM_P (original_target)
8058 || TREE_ADDRESSABLE (type)))
8059 temp = original_target;
8060 else if (TREE_ADDRESSABLE (type))
8061 abort ();
8062 else
8063 temp = assign_temp (type, 0, 0, 1);
8065 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8066 do the test of X as a store-flag operation, do this as
8067 A + ((X != 0) << log C). Similarly for other simple binary
8068 operators. Only do for C == 1 if BRANCH_COST is low. */
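/* Concretely, X ? A + 4 : A can be emitted branch-free as
   A + ((X != 0) << 2).  */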
8069 if (temp && singleton && binary_op
8070 && (TREE_CODE (binary_op) == PLUS_EXPR
8071 || TREE_CODE (binary_op) == MINUS_EXPR
8072 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8073 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8074 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8075 : integer_onep (TREE_OPERAND (binary_op, 1)))
8076 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8080 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8081 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8082 ? addv_optab : add_optab)
8083 : TREE_CODE (binary_op) == MINUS_EXPR
8084 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8085 ? subv_optab : sub_optab)
8086 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8089 /* If we had X ? A : A + 1, do this as A + (X == 0). */
8090 if (singleton == TREE_OPERAND (exp, 1))
8091 cond = invert_truthvalue (TREE_OPERAND (exp, 0));
8093 cond = TREE_OPERAND (exp, 0);
8095 result = do_store_flag (cond, (safe_from_p (temp, singleton, 1)
8097 mode, BRANCH_COST <= 1);
8099 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8100 result = expand_shift (LSHIFT_EXPR, mode, result,
8101 build_int_2 (tree_log2
8105 (safe_from_p (temp, singleton, 1)
8106 ? temp : NULL_RTX), 0);
8110 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8111 return expand_binop (mode, boptab, op1, result, temp,
8112 unsignedp, OPTAB_LIB_WIDEN);
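
	/* When this fast path succeeds, X ? A + 4 : A, say, has become
	   A + ((X != 0) << 2): a store-flag insn, a shift and an add
	   replace the conditional branch entirely.  */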
	do_pending_stack_adjust ();

	op0 = gen_label_rtx ();

	if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
	  {
	    if (temp != 0)
	      {
		/* If the target conflicts with the other operand of the
		   binary op, we can't use it.  Also, we can't use the target
		   if it is a hard register, because evaluating the condition
		   might clobber it.  */
		if ((binary_op
		     && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
		    || (REG_P (temp)
			&& REGNO (temp) < FIRST_PSEUDO_REGISTER))
		  temp = gen_reg_rtx (mode);
		store_expr (singleton, temp,
			    modifier == EXPAND_STACK_PARM ? 2 : 0);
	      }
	    else
	      expand_expr (singleton,
			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	    if (singleton == TREE_OPERAND (exp, 1))
	      jumpif (TREE_OPERAND (exp, 0), op0);
	    else
	      jumpifnot (TREE_OPERAND (exp, 0), op0);
	    if (binary_op && temp == 0)
	      /* Just touch the other operand.  */
	      expand_expr (TREE_OPERAND (binary_op, 1),
			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	    else if (binary_op)
	      store_expr (build2 (TREE_CODE (binary_op), type,
				  make_tree (type, temp),
				  TREE_OPERAND (binary_op, 1)),
			  temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
	    else
	      store_expr (build1 (TREE_CODE (unary_op), type,
				  make_tree (type, temp)),
			  temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
	    op1 = op0;
	  }
	/* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
	   comparison operator.  If we have one of these cases, set the
	   output to A, branch on A (cse will merge these two references),
	   then set the output to FOO.  */
	else if (temp
		 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
		 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
		 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     TREE_OPERAND (exp, 1), 0)
		 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
		     || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
		 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
	  {
	    if (REG_P (temp)
		&& REGNO (temp) < FIRST_PSEUDO_REGISTER)
	      temp = gen_reg_rtx (mode);
	    store_expr (TREE_OPERAND (exp, 1), temp,
			modifier == EXPAND_STACK_PARM ? 2 : 0);
	    jumpif (TREE_OPERAND (exp, 0), op0);

	    if (TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
	      store_expr (TREE_OPERAND (exp, 2), temp,
			  modifier == EXPAND_STACK_PARM ? 2 : 0);
	    else
	      expand_expr (TREE_OPERAND (exp, 2),
			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	    op1 = op0;
	  }
	else if (temp
		 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
		 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
		 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     TREE_OPERAND (exp, 2), 0)
		 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
		     || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
		 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
	  {
	    if (REG_P (temp)
		&& REGNO (temp) < FIRST_PSEUDO_REGISTER)
	      temp = gen_reg_rtx (mode);
	    store_expr (TREE_OPERAND (exp, 2), temp,
			modifier == EXPAND_STACK_PARM ? 2 : 0);
	    jumpifnot (TREE_OPERAND (exp, 0), op0);

	    if (TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
	      store_expr (TREE_OPERAND (exp, 1), temp,
			  modifier == EXPAND_STACK_PARM ? 2 : 0);
	    else
	      expand_expr (TREE_OPERAND (exp, 1),
			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	    op1 = op0;
	  }
	else
	  {
	    op1 = gen_label_rtx ();
	    jumpifnot (TREE_OPERAND (exp, 0), op0);

	    /* One branch of the cond can be void, if it never returns.  For
	       example A ? throw : E  */
	    if (temp != 0
		&& TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
	      store_expr (TREE_OPERAND (exp, 1), temp,
			  modifier == EXPAND_STACK_PARM ? 2 : 0);
	    else
	      expand_expr (TREE_OPERAND (exp, 1),
			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	    emit_jump_insn (gen_jump (op1));

	    emit_label (op0);
	    if (temp != 0
		&& TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
	      store_expr (TREE_OPERAND (exp, 2), temp,
			  modifier == EXPAND_STACK_PARM ? 2 : 0);
	    else
	      expand_expr (TREE_OPERAND (exp, 2),
			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	  }

	emit_label (op1);
	return temp;
      }
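
      /* The general case thus branches around the code for one arm:
	 arm 1 is expanded on the fall-through path and jumps to OP1,
	 while OP0 labels the code for arm 2; both arms store into TEMP
	 (which is zero when the result is ignored).  */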
    case INIT_EXPR:
      {
	tree lhs = TREE_OPERAND (exp, 0);
	tree rhs = TREE_OPERAND (exp, 1);

	temp = expand_assignment (lhs, rhs, ! ignore);
	return temp;
      }

    case MODIFY_EXPR:
      {
	/* If lhs is complex, expand calls in rhs before computing it.
	   That's so we don't compute a pointer and save it over a
	   call.  If lhs is simple, compute it first so we can give it
	   as a target if the rhs is just a call.  This avoids an
	   extra temp and copy and that prevents a partial-subsumption
	   which makes bad code.  Actually we could treat
	   component_ref's of vars like vars.  */

	tree lhs = TREE_OPERAND (exp, 0);
	tree rhs = TREE_OPERAND (exp, 1);
	/* Check for |= or &= of a bitfield of size one into another bitfield
	   of size 1.  In this case, (unless we need the result of the
	   assignment) we can do this more efficiently with a
	   test followed by an assignment, if necessary.

	   ??? At this point, we can't get a BIT_FIELD_REF here.  But if
	   things change so we do, this code should be enhanced to
	   support it.  */
	if (ignore
	    && TREE_CODE (lhs) == COMPONENT_REF
	    && (TREE_CODE (rhs) == BIT_IOR_EXPR
		|| TREE_CODE (rhs) == BIT_AND_EXPR)
	    && TREE_OPERAND (rhs, 0) == lhs
	    && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
	    && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
	    && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
	  {
	    rtx label = gen_label_rtx ();

	    do_jump (TREE_OPERAND (rhs, 1),
		     TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
		     TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
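	    /* For |=, the jump to LABEL is taken when the tested bit is
	       zero, leaving LHS unchanged, so the store of 1 below runs
	       only when the bit is set; for &=, the jump is taken when
	       the bit is one, so the store of 0 runs only when it is
	       clear.  */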
	    expand_assignment (lhs, convert (TREE_TYPE (rhs),
					     (TREE_CODE (rhs) == BIT_IOR_EXPR
					      ? integer_one_node
					      : integer_zero_node)),
			       0);
	    do_pending_stack_adjust ();
	    emit_label (label);
	    return const0_rtx;
	  }

	temp = expand_assignment (lhs, rhs, ! ignore);
	return temp;
      }
    case RETURN_EXPR:
      if (!TREE_OPERAND (exp, 0))
	expand_null_return ();
      else
	expand_return (TREE_OPERAND (exp, 0));
      return const0_rtx;
    case ADDR_EXPR:
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      /* If we are taking the address of something erroneous, just
	 return a zero.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
	return const0_rtx;

      /* If we are taking the address of a constant and are at the
	 top level, we have to use output_constant_def since we can't
	 call force_const_mem at top level.  */
      if (cfun == 0
	  && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
	      || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
		  == 'c')))
	op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
      else
	{
	  /* We make sure to pass const0_rtx down if we came in with
	     ignore set, to avoid doing the cleanups twice for something.  */
	  op0 = expand_expr (TREE_OPERAND (exp, 0),
			     ignore ? const0_rtx : NULL_RTX, VOIDmode,
			     (modifier == EXPAND_INITIALIZER
			      ? modifier : EXPAND_CONST_ADDRESS));

	  /* If we are going to ignore the result, OP0 will have been set
	     to const0_rtx, so just return it.  Don't get confused and
	     think we are taking the address of the constant.  */
	  if (ignore)
	    return op0;
	  /* We would like the object in memory.  If it is a constant, we can
	     have it be statically allocated into memory.  For a non-constant,
	     we need to allocate some memory and store the value into it.  */

	  if (CONSTANT_P (op0))
	    op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
				   op0);
	  else if (REG_P (op0) || GET_CODE (op0) == SUBREG
		   || GET_CODE (op0) == CONCAT || GET_CODE (op0) == PARALLEL
		   || GET_CODE (op0) == LO_SUM)
	    {
	      /* If this object is in a register, it can't be BLKmode.  */
	      tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
	      rtx memloc = assign_temp (inner_type, 1, 1, 1);

	      if (GET_CODE (op0) == PARALLEL)
		/* Handle calls that pass values in multiple
		   non-contiguous locations.  The Irix 6 ABI has examples
		   of this.  */
		emit_group_store (memloc, op0, inner_type,
				  int_size_in_bytes (inner_type));
	      else
		emit_move_insn (memloc, op0);

	      op0 = memloc;
	    }

	  if (!MEM_P (op0))
	    abort ();

	  mark_temp_addr_taken (op0);
	  if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	    {
	      op0 = XEXP (op0, 0);
	      if (GET_MODE (op0) == Pmode && mode == ptr_mode)
		op0 = convert_memory_address (ptr_mode, op0);
	      return op0;
	    }
	  /* If OP0 is not aligned at least as much as the type requires, we
	     need to make a temporary, copy OP0 to it, and take the address of
	     the temporary.  We want to use the alignment of the type, not of
	     the operand.  Note that this is incorrect for FUNCTION_TYPE, but
	     the test for BLKmode means that can't happen.  The test for
	     BLKmode is because we never make mis-aligned MEMs with
	     non-BLKmode.

	     We don't need to do this at all if the machine doesn't have
	     strict alignment.  */
	  if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
	      && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
		  > MEM_ALIGN (op0))
	      && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
	    {
	      tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
	      rtx new;

	      if (TYPE_ALIGN_OK (inner_type))
		abort ();

	      if (TREE_ADDRESSABLE (inner_type))
		{
		  /* We can't make a bitwise copy of this object, so fail.  */
		  error ("cannot take the address of an unaligned member");
		  return const0_rtx;
		}

	      new = assign_stack_temp_for_type
		(TYPE_MODE (inner_type),
		 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
		 : int_size_in_bytes (inner_type),
		 1, build_qualified_type (inner_type,
					  (TYPE_QUALS (inner_type)
					   | TYPE_QUAL_CONST)));

	      emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
			       (modifier == EXPAND_STACK_PARM
				? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));

	      op0 = new;
	    }

	  op0 = force_operand (XEXP (op0, 0), target);
	}

      if (flag_force_addr
	  && REG_P (op0)
	  && modifier != EXPAND_CONST_ADDRESS
	  && modifier != EXPAND_INITIALIZER
	  && modifier != EXPAND_SUM)
	op0 = force_reg (Pmode, op0);

      if (REG_P (op0)
	  && ! REG_USERVAR_P (op0))
	mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));

      if (GET_MODE (op0) == Pmode && mode == ptr_mode)
	op0 = convert_memory_address (ptr_mode, op0);

      return op0;
    case ENTRY_VALUE_EXPR:
      abort ();

      /* COMPLEX type for Extended Pascal & Fortran  */
    case COMPLEX_EXPR:
      {
	enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
	rtx insns;

	/* Get the rtx code of the operands.  */
	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
	op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);

	if (! target)
	  target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

	start_sequence ();

	/* Move the real (op0) and imaginary (op1) parts to their location.  */
	emit_move_insn (gen_realpart (mode, target), op0);
	emit_move_insn (gen_imagpart (mode, target), op1);

	insns = get_insns ();
	end_sequence ();

	/* Complex construction should appear as a single unit.  */
	/* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
	   each with a separate pseudo as destination.
	   It's not correct for flow to treat them as a unit.  */
	if (GET_CODE (target) != CONCAT)
	  emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
	else
	  emit_insn (insns);

	return target;
      }
    case REALPART_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
      return gen_realpart (mode, op0);

    case IMAGPART_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
      return gen_imagpart (mode, op0);
    case CONJ_EXPR:
      {
	enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
	rtx imag_t;
	rtx insns;

	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);

	if (! target)
	  target = gen_reg_rtx (mode);

	start_sequence ();

	/* Store the realpart and the negated imagpart to target.  */
	emit_move_insn (gen_realpart (partmode, target),
			gen_realpart (partmode, op0));

	imag_t = gen_imagpart (partmode, target);
	temp = expand_unop (partmode,
			    ! unsignedp && flag_trapv
			    && (GET_MODE_CLASS (partmode) == MODE_INT)
			    ? negv_optab : neg_optab,
			    gen_imagpart (partmode, op0), imag_t, 0);
	if (temp != imag_t)
	  emit_move_insn (imag_t, temp);

	insns = get_insns ();
	end_sequence ();

	/* Conjugate should appear as a single unit.
	   If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
	   each with a separate pseudo as destination.
	   It's not correct for flow to treat them as a unit.  */
	if (GET_CODE (target) != CONCAT)
	  emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
	else
	  emit_insn (insns);

	return target;
      }
    case RESX_EXPR:
      expand_resx_expr (exp);
      return const0_rtx;

    case TRY_CATCH_EXPR:
    case CATCH_EXPR:
    case EH_FILTER_EXPR:
    case TRY_FINALLY_EXPR:
      /* Lowered by tree-eh.c.  */
      abort ();

    case WITH_CLEANUP_EXPR:
    case CLEANUP_POINT_EXPR:
    case TARGET_EXPR:
    case CASE_LABEL_EXPR:
      /* Lowered by gimplify.c.  */
      abort ();

    case EXC_PTR_EXPR:
      return get_exception_pointer (cfun);

    case FILTER_EXPR:
      return get_exception_filter (cfun);
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case FDESC_EXPR:
      /* Function descriptors are not valid except for as
	 initialization constants, and should not be expanded.  */
      abort ();

    case SWITCH_EXPR:
      expand_start_case (SWITCH_COND (exp));
      /* The switch body is lowered in gimplify.c, we should never have
	 switches with a non-NULL SWITCH_BODY here.  */
      if (SWITCH_BODY (exp))
	abort ();
      if (SWITCH_LABELS (exp))
	{
	  tree vec = SWITCH_LABELS (exp);
	  size_t i = TREE_VEC_LENGTH (vec);

	  do
	    {
	      tree elt = TREE_VEC_ELT (vec, --i);
	      add_case_node (CASE_LOW (elt), CASE_HIGH (elt),
			     CASE_LABEL (elt));
	    }
	  while (i);
	}
      expand_end_case_type (SWITCH_COND (exp), TREE_TYPE (exp));
      return const0_rtx;

    case LABEL_EXPR:
      expand_label (TREE_OPERAND (exp, 0));
      return const0_rtx;

    case ASM_EXPR:
      expand_asm_expr (exp);
      return const0_rtx;
    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR expands to its first argument.  The caller should
	 have pulled out the size to use in whatever context it needed.  */
      return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode,
			       modifier, alt_rtl);

    default:
      return lang_hooks.expand_expr (exp, original_target, tmode,
				     modifier, alt_rtl);
    }
  /* Here to do an ordinary binary operator, generating an instruction
     from the optab already placed in `this_optab'.  */
 binop:
  expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		   subtarget, &op0, &op1, 0);
 binop2:
  if (modifier == EXPAND_STACK_PARM)
    target = 0;
  temp = expand_binop (mode, this_optab, op0, op1, target,
		       unsignedp, OPTAB_LIB_WIDEN);
  if (temp == 0)
    abort ();
  return REDUCE_BIT_FIELD (temp);
}
#undef REDUCE_BIT_FIELD
/* Subroutine of above: reduce EXP to the precision of TYPE (in the
   signedness of TYPE), possibly returning the result in TARGET.  */

static rtx
reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
{
  HOST_WIDE_INT prec = TYPE_PRECISION (type);
  if (target && GET_MODE (target) != GET_MODE (exp))
    target = 0;
  if (TYPE_UNSIGNED (type))
    {
      rtx mask;
      if (prec < HOST_BITS_PER_WIDE_INT)
	mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
				   GET_MODE (exp));
      else
	mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
				   ((unsigned HOST_WIDE_INT) 1
				    << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
				   GET_MODE (exp));
      return expand_and (GET_MODE (exp), exp, mask, target);
    }
  else
    {
      tree count = build_int_2 (GET_MODE_BITSIZE (GET_MODE (exp)) - prec, 0);
      exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
      return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
    }
}
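
/* For example, reducing to a signed 5-bit type in SImode shifts left
   by 27 and arithmetically right by 27, replicating bit 4 into the
   upper bits; the unsigned path instead masks with (1 << 5) - 1.  */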
/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
   when applied to the address of EXP produces an address known to be
   aligned more than BIGGEST_ALIGNMENT.  */

static int
is_aligning_offset (tree offset, tree exp)
{
  /* Strip off any conversions.  */
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
	 || TREE_CODE (offset) == NOP_EXPR
	 || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  /* We must now have a BIT_AND_EXPR with a constant that is one less than
     a power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
  if (TREE_CODE (offset) != BIT_AND_EXPR
      || !host_integerp (TREE_OPERAND (offset, 1), 1)
      || compare_tree_int (TREE_OPERAND (offset, 1),
			   BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
      || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
    return 0;

  /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
     It must be NEGATE_EXPR.  Then strip any more conversions.  */
  offset = TREE_OPERAND (offset, 0);
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
	 || TREE_CODE (offset) == NOP_EXPR
	 || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  if (TREE_CODE (offset) != NEGATE_EXPR)
    return 0;

  offset = TREE_OPERAND (offset, 0);
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
	 || TREE_CODE (offset) == NOP_EXPR
	 || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  /* This must now be the address of EXP.  */
  return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
}
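
/* The recognized form is thus (-EXP_ADDRESS) & (ALIGN - 1), for ALIGN
   a power of 2 larger than BIGGEST_ALIGNMENT: adding that offset to
   the address of EXP rounds it up to a multiple of ALIGN.  */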
/* Return the tree node if an ARG corresponds to a string constant or zero
   if it doesn't.  If we return nonzero, set *PTR_OFFSET to the offset
   in bytes within the string that ARG is accessing.  The type of the
   offset will be `sizetype'.  */

tree
string_constant (tree arg, tree *ptr_offset)
{
  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
    {
      *ptr_offset = size_zero_node;
      return TREE_OPERAND (arg, 0);
    }
  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
      && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg, 0), 0)) == STRING_CST)
    {
      *ptr_offset = convert (sizetype, TREE_OPERAND (TREE_OPERAND (arg, 0), 1));
      return TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
    }
  else if (TREE_CODE (arg) == PLUS_EXPR)
    {
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
	{
	  *ptr_offset = convert (sizetype, arg1);
	  return TREE_OPERAND (arg0, 0);
	}
      else if (TREE_CODE (arg1) == ADDR_EXPR
	       && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
	{
	  *ptr_offset = convert (sizetype, arg0);
	  return TREE_OPERAND (arg1, 0);
	}
    }

  return 0;
}
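
/* For example, given the tree for &"hello"[2] this returns the
   STRING_CST for "hello" with *PTR_OFFSET set to 2; given the tree
   for "hello" + i, it returns the STRING_CST with *PTR_OFFSET set
   to i.  */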
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is nonzero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */
static rtx
do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);
  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	       == FUNCTION_TYPE))
	  || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
		  == FUNCTION_TYPE))))
    return 0;
#endif
  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
	code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = LT;
      else
	code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = GE;
      else
	code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
	code = unsignedp ? GEU : GE;
      break;

    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;
    case LTGT_EXPR:
      code = LTGT;
      break;

    default:
      abort ();
    }
  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.

     The code to make this transformation was moved into fold_single_bit_test,
     so we just call into the folder and expand its result.  */

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
      return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
						arg0, arg1, type),
			  target, VOIDmode, EXPAND_NORMAL);
    }
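
  /* E.g. (x & 8) != 0 is expanded as (x >> 3) & 1, and the EQ form
     gets a further XOR with 1; no scc instruction is needed.  */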
  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (code, operand_mode, ccp_store_flag))
    return 0;

  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
    {
      /* We can only do this if it is one of the special cases that
	 can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
	  || (! only_cheap && code == GE && integer_zerop (arg1)))
	;
      else if (BRANCH_COST >= 0
	       && ! only_cheap && (code == NE || code == EQ)
	       && TREE_CODE (type) != REAL_TYPE
	       && ((abs_optab->handlers[(int) operand_mode].insn_code
		    != CODE_FOR_nothing)
		   || (ffs_optab->handlers[(int) operand_mode].insn_code
		       != CODE_FOR_nothing)))
	;
      else
	return 0;
    }
  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode)
    subtarget = 0;

  expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  result = emit_store_flag (target, code, op0, op1,
			    operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
	result = expand_binop (mode, xor_optab, result, const1_rtx,
			       result, 0, OPTAB_LIB_WIDEN);
      return result;
    }
  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (!REG_P (target)
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
			     operand_mode, NULL_RTX);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
	     || (result != const0_rtx && invert))
	    ? const0_rtx : const1_rtx);

  /* The code of RESULT may not match CODE if compare_from_rtx
     decided to swap its operands and reverse the original code.

     We know that compare_from_rtx returns either a CONST_INT or
     a new comparison code, so it is safe to just extract the
     code from RESULT.  */
  code = GET_CODE (result);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
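
/* The fallback sequence therefore sets TARGET to the value for a true
   comparison, branches to LABEL when the comparison holds, and
   otherwise falls through to overwrite TARGET with the opposite
   value.  */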
/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif

/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */
#ifndef CASE_VALUES_THRESHOLD
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#endif /* CASE_VALUES_THRESHOLD */
unsigned int
case_values_threshold (void)
{
  return CASE_VALUES_THRESHOLD;
}
/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */

int
try_casesi (tree index_type, tree index_expr, tree minval, tree range,
	    rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build2 (MINUS_EXPR, index_type,
			   index_expr, minval);
      minval = integer_zero_node;
      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
			       omode, 1, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
	{
	  index_expr = convert (lang_hooks.types.type_for_size
				(index_bits, 0), index_expr);
	  index_type = TREE_TYPE (index_expr);
	}

      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
    }

  do_pending_stack_adjust ();
  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);

  op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
		       op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
		       op2, TYPE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
			      table_label, default_label));
  return 1;
}
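
/* The casesi pattern performs the bounds check itself: it compares
   INDEX against OP1 (the minimum) and OP2 (the range) and jumps
   either through the dispatch table at TABLE_LABEL or to
   DEFAULT_LABEL when INDEX is out of range.  */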
/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif
/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

static void
do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
	      rtx default_label)
{
  rtx temp, vector;

  if (INTVAL (range) > cfun->max_jumptable_ents)
    cfun->max_jumptable_ents = INTVAL (range);
  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
			   default_label);
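
  /* For example, with a lower bound of 5 and a range of 10, an
     original index of 4 has become (unsigned) -1 after the
     subtraction; that exceeds 10, so the single GTU test also rejects
     values below the range.  */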
  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);
  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && !REG_P (index))
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
			gen_rtx_MULT (Pmode, index,
				      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
			gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  MEM_NOTRAP_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));
  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
int
try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
	       rtx table_label, rtx default_label)
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold (build2 (MINUS_EXPR, index_type,
			     convert (index_type, index_expr),
			     convert (index_type, minval)));
  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
		convert_modes (TYPE_MODE (index_type),
			       TYPE_MODE (TREE_TYPE (range)),
			       expand_expr (range, NULL_RTX,
					    VOIDmode, 0),
			       TYPE_UNSIGNED (TREE_TYPE (range))),
		table_label, default_label);
  return 1;
}
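
/* Unlike the casesi pattern, the bounds check for a tablejump is done
   explicitly by do_tablejump, so the index is first biased by MINVAL
   to make the table zero-based.  */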
/* Nonzero if the mode is a valid vector mode for this architecture.
   This returns nonzero even if there is no hardware support for the
   vector mode, but we can emulate with narrower modes.  */

int
vector_mode_valid_p (enum machine_mode mode)
{
  enum mode_class class = GET_MODE_CLASS (mode);
  enum machine_mode innermode;

  /* Doh!  What's going on?  */
  if (class != MODE_VECTOR_INT
      && class != MODE_VECTOR_FLOAT)
    return 0;

  /* Hardware support.  Woo hoo!  */
  if (VECTOR_MODE_SUPPORTED_P (mode))
    return 1;

  innermode = GET_MODE_INNER (mode);

  /* We should probably return 1 if requesting V4DI and we have no DI,
     but we have V2DI, but this is probably very unlikely.  */

  /* If we have support for the inner mode, we can safely emulate it.
     We may not have V2DI, but we can emulate with a pair of DIs.  */
  return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
}
/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */

static rtx
const_vector_from_tree (tree exp)
{
  rtvec v;
  int units, i;
  tree link, elt;
  enum machine_mode inner, mode;

  mode = TYPE_MODE (TREE_TYPE (exp));

  if (initializer_zerop (exp))
    return CONST0_RTX (mode);

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  v = rtvec_alloc (units);

  link = TREE_VECTOR_CST_ELTS (exp);
  for (i = 0; link; link = TREE_CHAIN (link), ++i)
    {
      elt = TREE_VALUE (link);

      if (TREE_CODE (elt) == REAL_CST)
	RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
							 inner);
      else
	RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
					       TREE_INT_CST_HIGH (elt),
					       inner);
    }

  /* Initialize remaining elements to 0.  */
  for (; i < units; ++i)
    RTVEC_ELT (v, i) = CONST0_RTX (inner);

  return gen_rtx_raw_CONST_VECTOR (mode, v);
}

#include "gt-expr.h"