1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
24 #include "coretypes.h"
32 #include "hard-reg-set.h"
35 #include "insn-config.h"
36 #include "insn-attr.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
44 #include "typeclass.h"
47 #include "langhooks.h"
50 #include "tree-iterator.h"
51 #include "tree-pass.h"
52 #include "tree-flow.h"
56 /* Decide whether a function's arguments should be processed
57 from first to last or from last to first.
59 They should if the stack and args grow in opposite directions, but
60 only if we have push insns. */
64 #ifndef PUSH_ARGS_REVERSED
65 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
66 #define PUSH_ARGS_REVERSED /* If it's last to first. */
72 #ifndef STACK_PUSH_CODE
73 #ifdef STACK_GROWS_DOWNWARD
74 #define STACK_PUSH_CODE PRE_DEC
76 #define STACK_PUSH_CODE PRE_INC
81 /* If this is nonzero, we do not bother generating VOLATILE
82 around volatile memory references, and we are willing to
83 output indirect addresses. If cse is to follow, we reject
84 indirect addresses so a useful potential cse is generated;
85 if it is used only once, instruction combination will produce
86 the same indirect address eventually. */
89 /* This structure is used by move_by_pieces to describe the move to
100 int explicit_inc_from;
101 unsigned HOST_WIDE_INT len;
102 HOST_WIDE_INT offset;
106 /* This structure is used by store_by_pieces to describe the clear to
109 struct store_by_pieces
115 unsigned HOST_WIDE_INT len;
116 HOST_WIDE_INT offset;
117 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
122 static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
124 static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
125 struct move_by_pieces *);
126 static bool block_move_libcall_safe_for_call_parm (void);
127 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned);
128 static rtx emit_block_move_via_libcall (rtx, rtx, rtx);
129 static tree emit_block_move_libcall_fn (int);
130 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
131 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
132 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
133 static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
134 static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
135 struct store_by_pieces *);
136 static bool clear_storage_via_clrmem (rtx, rtx, unsigned);
137 static rtx clear_storage_via_libcall (rtx, rtx);
138 static tree clear_storage_libcall_fn (int);
139 static rtx compress_float_constant (rtx, rtx);
140 static rtx get_subtarget (rtx);
141 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
142 HOST_WIDE_INT, enum machine_mode,
143 tree, tree, int, int);
144 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
145 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
146 tree, enum machine_mode, int, tree, int);
147 static rtx var_rtx (tree);
149 static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
150 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);
152 static int is_aligning_offset (tree, tree);
153 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
154 enum expand_modifier);
155 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
156 static rtx do_store_flag (tree, rtx, enum machine_mode, int);
158 static void emit_single_push_insn (enum machine_mode, rtx, tree);
160 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
161 static rtx const_vector_from_tree (tree);
163 /* Record for each mode whether we can move a register directly to or
164 from an object of that mode in memory. If we can't, we won't try
165 to use that mode directly when accessing a field of that mode. */
167 static char direct_load[NUM_MACHINE_MODES];
168 static char direct_store[NUM_MACHINE_MODES];
170 /* Record for each mode whether we can float-extend from memory. */
172 static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
174 /* This macro is used to determine whether move_by_pieces should be called
175 to perform a structure copy. */
176 #ifndef MOVE_BY_PIECES_P
177 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
178 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
181 /* This macro is used to determine whether clear_by_pieces should be
182 called to clear storage. */
183 #ifndef CLEAR_BY_PIECES_P
184 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
185 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
188 /* This macro is used to determine whether store_by_pieces should be
189 called to "memset" storage with byte values other than zero, or
190 to "memcpy" storage when the source is a constant string. */
191 #ifndef STORE_BY_PIECES_P
192 #define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
195 /* This array records the insn_code of insns to perform block moves. */
196 enum insn_code movmem_optab[NUM_MACHINE_MODES];
198 /* This array records the insn_code of insns to perform block clears. */
199 enum insn_code clrmem_optab[NUM_MACHINE_MODES];
201 /* These arrays record the insn_code of two different kinds of insns
202 to perform block compares. */
203 enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
204 enum insn_code cmpmem_optab[NUM_MACHINE_MODES];
206 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
208 #ifndef SLOW_UNALIGNED_ACCESS
209 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
212 /* This is run once per compilation to set up which modes can be used
213 directly in memory and to initialize the block move optab. */
216 init_expr_once (void)
219 enum machine_mode mode;
224 /* Try indexing by frame ptr and try by stack ptr.
225 It is known that on the Convex the stack ptr isn't a valid index.
226 With luck, one or the other is valid on any machine. */
227 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
228 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
230 /* A scratch register we can modify in-place below to avoid
231 useless RTL allocations. */
232 reg = gen_rtx_REG (VOIDmode, -1);
234 insn = rtx_alloc (INSN);
235 pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
236 PATTERN (insn) = pat;
238 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
239 mode = (enum machine_mode) ((int) mode + 1))
243 direct_load[(int) mode] = direct_store[(int) mode] = 0;
244 PUT_MODE (mem, mode);
245 PUT_MODE (mem1, mode);
246 PUT_MODE (reg, mode);
248 /* See if there is some register that can be used in this mode and
249 directly loaded or stored from memory. */
251 if (mode != VOIDmode && mode != BLKmode)
252 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
253 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
256 if (! HARD_REGNO_MODE_OK (regno, mode))
262 SET_DEST (pat) = reg;
263 if (recog (pat, insn, &num_clobbers) >= 0)
264 direct_load[(int) mode] = 1;
266 SET_SRC (pat) = mem1;
267 SET_DEST (pat) = reg;
268 if (recog (pat, insn, &num_clobbers) >= 0)
269 direct_load[(int) mode] = 1;
272 SET_DEST (pat) = mem;
273 if (recog (pat, insn, &num_clobbers) >= 0)
274 direct_store[(int) mode] = 1;
277 SET_DEST (pat) = mem1;
278 if (recog (pat, insn, &num_clobbers) >= 0)
279 direct_store[(int) mode] = 1;
283 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
285 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
286 mode = GET_MODE_WIDER_MODE (mode))
288 enum machine_mode srcmode;
289 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
290 srcmode = GET_MODE_WIDER_MODE (srcmode))
294 ic = can_extend_p (mode, srcmode, 0);
295 if (ic == CODE_FOR_nothing)
298 PUT_MODE (mem, srcmode);
300 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
301 float_extend_from_mem[mode][srcmode] = true;
306 /* This is run at the start of compiling a function. */
311 cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
314 /* Copy data from FROM to TO, where the machine modes are not the same.
315 Both modes may be integer, or both may be floating.
316 UNSIGNEDP should be nonzero if FROM is an unsigned type.
317 This causes zero-extension instead of sign-extension. */
320 convert_move (rtx to, rtx from, int unsignedp)
322 enum machine_mode to_mode = GET_MODE (to);
323 enum machine_mode from_mode = GET_MODE (from);
324 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
325 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
329 /* rtx code for making an equivalent value. */
330 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
331 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
334 if (to_real != from_real)
337 /* If the source and destination are already the same, then there's
342 /* If FROM is a SUBREG that indicates that we have already done at least
343 the required extension, strip it. We don't handle such SUBREGs as
346 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
347 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
348 >= GET_MODE_SIZE (to_mode))
349 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
350 from = gen_lowpart (to_mode, from), from_mode = to_mode;
352 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
355 if (to_mode == from_mode
356 || (from_mode == VOIDmode && CONSTANT_P (from)))
358 emit_move_insn (to, from);
362 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
364 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
367 if (VECTOR_MODE_P (to_mode))
368 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
370 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
372 emit_move_insn (to, from);
376 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
378 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
379 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
388 if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
390 else if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
395 /* Try converting directly if the insn is supported. */
397 code = tab->handlers[to_mode][from_mode].insn_code;
398 if (code != CODE_FOR_nothing)
400 emit_unop_insn (code, to, from,
401 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
405 /* Otherwise use a libcall. */
406 libcall = tab->handlers[to_mode][from_mode].libfunc;
409 /* This conversion is not implemented yet. */
413 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
415 insns = get_insns ();
417 emit_libcall_block (insns, to, value,
418 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
420 : gen_rtx_FLOAT_EXTEND (to_mode, from));
424 /* Handle pointer conversion. */ /* SPEE 900220. */
425 /* Targets are expected to provide conversion insns between PxImode and
426 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
427 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
429 enum machine_mode full_mode
430 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
432 if (trunc_optab->handlers[to_mode][full_mode].insn_code
436 if (full_mode != from_mode)
437 from = convert_to_mode (full_mode, from, unsignedp);
438 emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
442 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
444 enum machine_mode full_mode
445 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
447 if (sext_optab->handlers[full_mode][from_mode].insn_code
451 emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
453 if (to_mode == full_mode)
456 /* else proceed to integer conversions below. */
457 from_mode = full_mode;
460 /* Now both modes are integers. */
462 /* Handle expanding beyond a word. */
463 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
464 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
471 enum machine_mode lowpart_mode;
472 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
474 /* Try converting directly if the insn is supported. */
475 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
478 /* If FROM is a SUBREG, put it into a register. Do this
479 so that we always generate the same set of insns for
480 better cse'ing; if an intermediate assignment occurred,
481 we won't be doing the operation directly on the SUBREG. */
482 if (optimize > 0 && GET_CODE (from) == SUBREG)
483 from = force_reg (from_mode, from);
484 emit_unop_insn (code, to, from, equiv_code);
487 /* Next, try converting via full word. */
488 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
489 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
490 != CODE_FOR_nothing))
494 if (reg_overlap_mentioned_p (to, from))
495 from = force_reg (from_mode, from);
496 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
498 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
499 emit_unop_insn (code, to,
500 gen_lowpart (word_mode, to), equiv_code);
504 /* No special multiword conversion insn; do it by hand. */
507 /* Since we will turn this into a no conflict block, we must ensure
508 that the source does not overlap the target. */
510 if (reg_overlap_mentioned_p (to, from))
511 from = force_reg (from_mode, from);
513 /* Get a copy of FROM widened to a word, if necessary. */
514 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
515 lowpart_mode = word_mode;
517 lowpart_mode = from_mode;
519 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
521 lowpart = gen_lowpart (lowpart_mode, to);
522 emit_move_insn (lowpart, lowfrom);
524 /* Compute the value to put in each remaining word. */
526 fill_value = const0_rtx;
531 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
532 && STORE_FLAG_VALUE == -1)
534 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
536 fill_value = gen_reg_rtx (word_mode);
537 emit_insn (gen_slt (fill_value));
543 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
544 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
546 fill_value = convert_to_mode (word_mode, fill_value, 1);
550 /* Fill the remaining words. */
551 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
553 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
554 rtx subword = operand_subword (to, index, 1, to_mode);
559 if (fill_value != subword)
560 emit_move_insn (subword, fill_value);
563 insns = get_insns ();
566 emit_no_conflict_block (insns, to, from, NULL_RTX,
567 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
571 /* Truncating multi-word to a word or less. */
572 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
573 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
576 && ! MEM_VOLATILE_P (from)
577 && direct_load[(int) to_mode]
578 && ! mode_dependent_address_p (XEXP (from, 0)))
580 || GET_CODE (from) == SUBREG))
581 from = force_reg (from_mode, from);
582 convert_move (to, gen_lowpart (word_mode, from), 0);
586 /* Now follow all the conversions between integers
587 no more than a word long. */
589 /* For truncation, usually we can just refer to FROM in a narrower mode. */
590 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
591 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
592 GET_MODE_BITSIZE (from_mode)))
595 && ! MEM_VOLATILE_P (from)
596 && direct_load[(int) to_mode]
597 && ! mode_dependent_address_p (XEXP (from, 0)))
599 || GET_CODE (from) == SUBREG))
600 from = force_reg (from_mode, from);
601 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
602 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
603 from = copy_to_reg (from);
604 emit_move_insn (to, gen_lowpart (to_mode, from));
608 /* Handle extension. */
609 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
611 /* Convert directly if that works. */
612 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
616 from = force_not_mem (from);
618 emit_unop_insn (code, to, from, equiv_code);
623 enum machine_mode intermediate;
627 /* Search for a mode to convert via. */
628 for (intermediate = from_mode; intermediate != VOIDmode;
629 intermediate = GET_MODE_WIDER_MODE (intermediate))
630 if (((can_extend_p (to_mode, intermediate, unsignedp)
632 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
633 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
634 GET_MODE_BITSIZE (intermediate))))
635 && (can_extend_p (intermediate, from_mode, unsignedp)
636 != CODE_FOR_nothing))
638 convert_move (to, convert_to_mode (intermediate, from,
639 unsignedp), unsignedp);
643 /* No suitable intermediate mode.
644 Generate what we need with shifts. */
645 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
646 - GET_MODE_BITSIZE (from_mode), 0);
647 from = gen_lowpart (to_mode, force_reg (from_mode, from));
648 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
650 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
653 emit_move_insn (to, tmp);
658 /* Support special truncate insns for certain modes. */
659 if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
661 emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
666 /* Handle truncation of volatile memrefs, and so on;
667 the things that couldn't be truncated directly,
668 and for which there was no special instruction.
670 ??? Code above formerly short-circuited this, for most integer
671 mode pairs, with a force_reg in from_mode followed by a recursive
672 call to this routine. Appears always to have been wrong. */
673 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
675 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
676 emit_move_insn (to, temp);
680 /* Mode combination is not recognized. */
684 /* Return an rtx for a value that would result
685 from converting X to mode MODE.
686 Both X and MODE may be floating, or both integer.
687 UNSIGNEDP is nonzero if X is an unsigned value.
688 This can be done by referring to a part of X in place
689 or by copying to a new temporary with conversion. */
692 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
694 return convert_modes (mode, VOIDmode, x, unsignedp);
697 /* Return an rtx for a value that would result
698 from converting X from mode OLDMODE to mode MODE.
699 Both modes may be floating, or both integer.
700 UNSIGNEDP is nonzero if X is an unsigned value.
702 This can be done by referring to a part of X in place
703 or by copying to a new temporary with conversion.
705 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
708 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
712 /* If FROM is a SUBREG that indicates that we have already done at least
713 the required extension, strip it. */
715 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
716 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
717 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
718 x = gen_lowpart (mode, x);
720 if (GET_MODE (x) != VOIDmode)
721 oldmode = GET_MODE (x);
726 /* There is one case that we must handle specially: If we are converting
727 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
728 we are to interpret the constant as unsigned, gen_lowpart will do
729 the wrong if the constant appears negative. What we want to do is
730 make the high-order word of the constant zero, not all ones. */
732 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
733 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
734 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
736 HOST_WIDE_INT val = INTVAL (x);
738 if (oldmode != VOIDmode
739 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
741 int width = GET_MODE_BITSIZE (oldmode);
743 /* We need to zero extend VAL. */
744 val &= ((HOST_WIDE_INT) 1 << width) - 1;
747 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
750 /* We can do this with a gen_lowpart if both desired and current modes
751 are integer, and this is either a constant integer, a register, or a
752 non-volatile MEM. Except for the constant case where MODE is no
753 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
755 if ((GET_CODE (x) == CONST_INT
756 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
757 || (GET_MODE_CLASS (mode) == MODE_INT
758 && GET_MODE_CLASS (oldmode) == MODE_INT
759 && (GET_CODE (x) == CONST_DOUBLE
760 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
761 && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
762 && direct_load[(int) mode])
764 && (! HARD_REGISTER_P (x)
765 || HARD_REGNO_MODE_OK (REGNO (x), mode))
766 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
767 GET_MODE_BITSIZE (GET_MODE (x)))))))))
769 /* ?? If we don't know OLDMODE, we have to assume here that
770 X does not need sign- or zero-extension. This may not be
771 the case, but it's the best we can do. */
772 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
773 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
775 HOST_WIDE_INT val = INTVAL (x);
776 int width = GET_MODE_BITSIZE (oldmode);
778 /* We must sign or zero-extend in this case. Start by
779 zero-extending, then sign extend if we need to. */
780 val &= ((HOST_WIDE_INT) 1 << width) - 1;
782 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
783 val |= (HOST_WIDE_INT) (-1) << width;
785 return gen_int_mode (val, mode);
788 return gen_lowpart (mode, x);
791 /* Converting from integer constant into mode is always equivalent to an
793 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
795 if (GET_MODE_BITSIZE (mode) != GET_MODE_BITSIZE (oldmode))
797 return simplify_gen_subreg (mode, x, oldmode, 0);
800 temp = gen_reg_rtx (mode);
801 convert_move (temp, x, unsignedp);
805 /* STORE_MAX_PIECES is the number of bytes at a time that we can
806 store efficiently. Due to internal GCC limitations, this is
807 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
808 for an immediate constant. */
810 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
812 /* Determine whether the LEN bytes can be moved by using several move
813 instructions. Return nonzero if a call to move_by_pieces should
817 can_move_by_pieces (unsigned HOST_WIDE_INT len,
818 unsigned int align ATTRIBUTE_UNUSED)
820 return MOVE_BY_PIECES_P (len, align);
823 /* Generate several move instructions to copy LEN bytes from block FROM to
824 block TO. (These are MEM rtx's with BLKmode).
826 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
827 used to push FROM to the stack.
829 ALIGN is maximum stack alignment we can assume.
831 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
832 mempcpy, and if ENDP is 2 return memory the end minus one byte ala
836 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
837 unsigned int align, int endp)
839 struct move_by_pieces data;
840 rtx to_addr, from_addr = XEXP (from, 0);
841 unsigned int max_size = MOVE_MAX_PIECES + 1;
842 enum machine_mode mode = VOIDmode, tmode;
843 enum insn_code icode;
845 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
848 data.from_addr = from_addr;
851 to_addr = XEXP (to, 0);
854 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
855 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
857 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
864 #ifdef STACK_GROWS_DOWNWARD
870 data.to_addr = to_addr;
873 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
874 || GET_CODE (from_addr) == POST_INC
875 || GET_CODE (from_addr) == POST_DEC);
877 data.explicit_inc_from = 0;
878 data.explicit_inc_to = 0;
879 if (data.reverse) data.offset = len;
882 /* If copying requires more than two move insns,
883 copy addresses to registers (to make displacements shorter)
884 and use post-increment if available. */
885 if (!(data.autinc_from && data.autinc_to)
886 && move_by_pieces_ninsns (len, align) > 2)
888 /* Find the mode of the largest move... */
889 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
890 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
891 if (GET_MODE_SIZE (tmode) < max_size)
894 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
896 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
897 data.autinc_from = 1;
898 data.explicit_inc_from = -1;
900 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
902 data.from_addr = copy_addr_to_reg (from_addr);
903 data.autinc_from = 1;
904 data.explicit_inc_from = 1;
906 if (!data.autinc_from && CONSTANT_P (from_addr))
907 data.from_addr = copy_addr_to_reg (from_addr);
908 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
910 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
912 data.explicit_inc_to = -1;
914 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
916 data.to_addr = copy_addr_to_reg (to_addr);
918 data.explicit_inc_to = 1;
920 if (!data.autinc_to && CONSTANT_P (to_addr))
921 data.to_addr = copy_addr_to_reg (to_addr);
924 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
925 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
926 align = MOVE_MAX * BITS_PER_UNIT;
928 /* First move what we can in the largest integer mode, then go to
929 successively smaller modes. */
933 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
934 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
935 if (GET_MODE_SIZE (tmode) < max_size)
938 if (mode == VOIDmode)
941 icode = mov_optab->handlers[(int) mode].insn_code;
942 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
943 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
945 max_size = GET_MODE_SIZE (mode);
948 /* The code above should have handled everything. */
962 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
963 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
965 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
968 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
975 to1 = adjust_address (data.to, QImode, data.offset);
983 /* Return number of insns required to move L bytes by pieces.
984 ALIGN (in bits) is maximum alignment we can assume. */
986 static unsigned HOST_WIDE_INT
987 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align)
989 unsigned HOST_WIDE_INT n_insns = 0;
990 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
992 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
993 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
994 align = MOVE_MAX * BITS_PER_UNIT;
998 enum machine_mode mode = VOIDmode, tmode;
999 enum insn_code icode;
1001 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1002 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1003 if (GET_MODE_SIZE (tmode) < max_size)
1006 if (mode == VOIDmode)
1009 icode = mov_optab->handlers[(int) mode].insn_code;
1010 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1011 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1013 max_size = GET_MODE_SIZE (mode);
1021 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1022 with move instructions for mode MODE. GENFUN is the gen_... function
1023 to make a move insn for that mode. DATA has all the other info. */
1026 move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
1027 struct move_by_pieces *data)
1029 unsigned int size = GET_MODE_SIZE (mode);
1030 rtx to1 = NULL_RTX, from1;
1032 while (data->len >= size)
1035 data->offset -= size;
1039 if (data->autinc_to)
1040 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1043 to1 = adjust_address (data->to, mode, data->offset);
1046 if (data->autinc_from)
1047 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1050 from1 = adjust_address (data->from, mode, data->offset);
1052 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1053 emit_insn (gen_add2_insn (data->to_addr,
1054 GEN_INT (-(HOST_WIDE_INT)size)));
1055 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1056 emit_insn (gen_add2_insn (data->from_addr,
1057 GEN_INT (-(HOST_WIDE_INT)size)));
1060 emit_insn ((*genfun) (to1, from1));
1063 #ifdef PUSH_ROUNDING
1064 emit_single_push_insn (mode, from1, NULL);
1070 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1071 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1072 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1073 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1075 if (! data->reverse)
1076 data->offset += size;
1082 /* Emit code to move a block Y to a block X. This may be done with
1083 string-move instructions, with multiple scalar move instructions,
1084 or with a library call.
1086 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1087 SIZE is an rtx that says how long they are.
1088 ALIGN is the maximum alignment we can assume they have.
1089 METHOD describes what kind of copy this is, and what mechanisms may be used.
1091 Return the address of the new block, if memcpy is called and returns it,
1095 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1103 case BLOCK_OP_NORMAL:
1104 may_use_call = true;
1107 case BLOCK_OP_CALL_PARM:
1108 may_use_call = block_move_libcall_safe_for_call_parm ();
1110 /* Make inhibit_defer_pop nonzero around the library call
1111 to force it to pop the arguments right away. */
1115 case BLOCK_OP_NO_LIBCALL:
1116 may_use_call = false;
1123 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1132 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1133 block copy is more efficient for other large modes, e.g. DCmode. */
1134 x = adjust_address (x, BLKmode, 0);
1135 y = adjust_address (y, BLKmode, 0);
1137 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1138 can be incorrect is coming from __builtin_memcpy. */
1139 if (GET_CODE (size) == CONST_INT)
1141 if (INTVAL (size) == 0)
1144 x = shallow_copy_rtx (x);
1145 y = shallow_copy_rtx (y);
1146 set_mem_size (x, size);
1147 set_mem_size (y, size);
1150 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1151 move_by_pieces (x, y, INTVAL (size), align, 0);
1152 else if (emit_block_move_via_movmem (x, y, size, align))
1154 else if (may_use_call)
1155 retval = emit_block_move_via_libcall (x, y, size);
1157 emit_block_move_via_loop (x, y, size, align);
1159 if (method == BLOCK_OP_CALL_PARM)
1165 /* A subroutine of emit_block_move. Returns true if calling the
1166 block move libcall will not clobber any parameters which may have
1167 already been placed on the stack. */
1170 block_move_libcall_safe_for_call_parm (void)
1172 /* If arguments are pushed on the stack, then they're safe. */
1176 /* If registers go on the stack anyway, any argument is sure to clobber
1177 an outgoing argument. */
1178 #if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
1180 tree fn = emit_block_move_libcall_fn (false);
1182 if (REG_PARM_STACK_SPACE (fn) != 0)
1187 /* If any argument goes in memory, then it might clobber an outgoing
1190 CUMULATIVE_ARGS args_so_far;
1193 fn = emit_block_move_libcall_fn (false);
1194 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);
1196 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1197 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1199 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1200 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1201 if (!tmp || !REG_P (tmp))
1203 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
1206 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1212 /* A subroutine of emit_block_move. Expand a movmem pattern;
1213 return true if successful. */
1216 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align)
1218 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1219 int save_volatile_ok = volatile_ok;
1220 enum machine_mode mode;
1222 /* Since this is a move insn, we don't care about volatility. */
1225 /* Try the most limited insn first, because there's no point
1226 including more than one in the machine description unless
1227 the more limited one has some advantage. */
1229 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1230 mode = GET_MODE_WIDER_MODE (mode))
1232 enum insn_code code = movmem_optab[(int) mode];
1233 insn_operand_predicate_fn pred;
1235 if (code != CODE_FOR_nothing
1236 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1237 here because if SIZE is less than the mode mask, as it is
1238 returned by the macro, it will definitely be less than the
1239 actual mode mask. */
1240 && ((GET_CODE (size) == CONST_INT
1241 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1242 <= (GET_MODE_MASK (mode) >> 1)))
1243 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1244 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1245 || (*pred) (x, BLKmode))
1246 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1247 || (*pred) (y, BLKmode))
1248 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1249 || (*pred) (opalign, VOIDmode)))
1252 rtx last = get_last_insn ();
1255 op2 = convert_to_mode (mode, size, 1);
1256 pred = insn_data[(int) code].operand[2].predicate;
1257 if (pred != 0 && ! (*pred) (op2, mode))
1258 op2 = copy_to_mode_reg (mode, op2);
1260 /* ??? When called via emit_block_move_for_call, it'd be
1261 nice if there were some way to inform the backend, so
1262 that it doesn't fail the expansion because it thinks
1263 emitting the libcall would be more efficient. */
1265 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1269 volatile_ok = save_volatile_ok;
1273 delete_insns_since (last);
1277 volatile_ok = save_volatile_ok;
1281 /* A subroutine of emit_block_move. Expand a call to memcpy.
1282 Return the return value from memcpy, 0 otherwise. */
1285 emit_block_move_via_libcall (rtx dst, rtx src, rtx size)
1287 rtx dst_addr, src_addr;
1288 tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
1289 enum machine_mode size_mode;
1292 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1293 pseudos. We can then place those new pseudos into a VAR_DECL and
1296 dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1297 src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
1299 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1300 src_addr = convert_memory_address (ptr_mode, src_addr);
1302 dst_tree = make_tree (ptr_type_node, dst_addr);
1303 src_tree = make_tree (ptr_type_node, src_addr);
1305 size_mode = TYPE_MODE (sizetype);
1307 size = convert_to_mode (size_mode, size, 1);
1308 size = copy_to_mode_reg (size_mode, size);
1310 /* It is incorrect to use the libcall calling conventions to call
1311 memcpy in this context. This could be a user call to memcpy and
1312 the user may wish to examine the return value from memcpy. For
1313 targets where libcalls and normal calls have different conventions
1314 for returning pointers, we could end up generating incorrect code. */
1316 size_tree = make_tree (sizetype, size);
1318 fn = emit_block_move_libcall_fn (true);
1319 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
1320 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1321 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1323 /* Now we have to build up the CALL_EXPR itself. */
1324 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1325 call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1326 call_expr, arg_list, NULL_TREE);
1328 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1330 /* If we are initializing a readonly value, show the above call clobbered
1331 it. Otherwise, a load from it may erroneously be hoisted from a loop, or
1332 the delay slot scheduler might overlook conflicts and take nasty
1334 if (RTX_UNCHANGING_P (dst))
1335 add_function_usage_to
1336 (last_call_insn (), gen_rtx_EXPR_LIST (VOIDmode,
1337 gen_rtx_CLOBBER (VOIDmode, dst),
1343 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1344 for the function we use for block copies. The first time FOR_CALL
1345 is true, we call assemble_external. */
1347 static GTY(()) tree block_move_fn;
1350 init_block_move_fn (const char *asmspec)
1356 fn = get_identifier ("memcpy");
1357 args = build_function_type_list (ptr_type_node, ptr_type_node,
1358 const_ptr_type_node, sizetype,
1361 fn = build_decl (FUNCTION_DECL, fn, args);
1362 DECL_EXTERNAL (fn) = 1;
1363 TREE_PUBLIC (fn) = 1;
1364 DECL_ARTIFICIAL (fn) = 1;
1365 TREE_NOTHROW (fn) = 1;
1371 set_user_assembler_name (block_move_fn, asmspec);
1375 emit_block_move_libcall_fn (int for_call)
1377 static bool emitted_extern;
1380 init_block_move_fn (NULL);
1382 if (for_call && !emitted_extern)
1384 emitted_extern = true;
1385 make_decl_rtl (block_move_fn);
1386 assemble_external (block_move_fn);
1389 return block_move_fn;
1392 /* A subroutine of emit_block_move. Copy the data via an explicit
1393 loop. This is used only when libcalls are forbidden. */
1394 /* ??? It'd be nice to copy in hunks larger than QImode. */
1397 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1398 unsigned int align ATTRIBUTE_UNUSED)
1400 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1401 enum machine_mode iter_mode;
1403 iter_mode = GET_MODE (size);
1404 if (iter_mode == VOIDmode)
1405 iter_mode = word_mode;
1407 top_label = gen_label_rtx ();
1408 cmp_label = gen_label_rtx ();
1409 iter = gen_reg_rtx (iter_mode);
1411 emit_move_insn (iter, const0_rtx);
1413 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1414 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1415 do_pending_stack_adjust ();
1417 emit_jump (cmp_label);
1418 emit_label (top_label);
1420 tmp = convert_modes (Pmode, iter_mode, iter, true);
1421 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
1422 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
1423 x = change_address (x, QImode, x_addr);
1424 y = change_address (y, QImode, y_addr);
1426 emit_move_insn (x, y);
1428 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1429 true, OPTAB_LIB_WIDEN);
1431 emit_move_insn (iter, tmp);
1433 emit_label (cmp_label);
1435 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1439 /* Copy all or part of a value X into registers starting at REGNO.
1440 The number of registers to be filled is NREGS. */
1443 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1446 #ifdef HAVE_load_multiple
1454 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1455 x = validize_mem (force_const_mem (mode, x));
1457 /* See if the machine can do this with a load multiple insn. */
1458 #ifdef HAVE_load_multiple
1459 if (HAVE_load_multiple)
1461 last = get_last_insn ();
1462 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1470 delete_insns_since (last);
1474 for (i = 0; i < nregs; i++)
1475 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1476 operand_subword_force (x, i, mode));
1479 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1480 The number of registers to be filled is NREGS. */
1483 move_block_from_reg (int regno, rtx x, int nregs)
1490 /* See if the machine can do this with a store multiple insn. */
1491 #ifdef HAVE_store_multiple
1492 if (HAVE_store_multiple)
1494 rtx last = get_last_insn ();
1495 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1503 delete_insns_since (last);
1507 for (i = 0; i < nregs; i++)
1509 rtx tem = operand_subword (x, i, 1, BLKmode);
1514 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1518 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1519 ORIG, where ORIG is a non-consecutive group of registers represented by
1520 a PARALLEL. The clone is identical to the original except in that the
1521 original set of registers is replaced by a new set of pseudo registers.
1522 The new set has the same modes as the original set. */
1525 gen_group_rtx (rtx orig)
1530 if (GET_CODE (orig) != PARALLEL)
1533 length = XVECLEN (orig, 0);
1534 tmps = alloca (sizeof (rtx) * length);
1536 /* Skip a NULL entry in first slot. */
1537 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1542 for (; i < length; i++)
1544 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1545 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1547 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1550 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1553 /* Emit code to move a block ORIG_SRC of type TYPE to a block DST,
1554 where DST is non-consecutive registers represented by a PARALLEL.
1555 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1559 emit_group_load (rtx dst, rtx orig_src, tree type ATTRIBUTE_UNUSED, int ssize)
1564 if (GET_CODE (dst) != PARALLEL)
1567 /* Check for a NULL entry, used to indicate that the parameter goes
1568 both on the stack and in registers. */
1569 if (XEXP (XVECEXP (dst, 0, 0), 0))
1574 tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
1576 /* Process the pieces. */
1577 for (i = start; i < XVECLEN (dst, 0); i++)
1579 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1580 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1581 unsigned int bytelen = GET_MODE_SIZE (mode);
1584 /* Handle trailing fragments that run over the size of the struct. */
1585 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1587 /* Arrange to shift the fragment to where it belongs.
1588 extract_bit_field loads to the lsb of the reg. */
1590 #ifdef BLOCK_REG_PADDING
1591 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1592 == (BYTES_BIG_ENDIAN ? upward : downward)
1597 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1598 bytelen = ssize - bytepos;
1603 /* If we won't be loading directly from memory, protect the real source
1604 from strange tricks we might play; but make sure that the source can
1605 be loaded directly into the destination. */
1607 if (!MEM_P (orig_src)
1608 && (!CONSTANT_P (orig_src)
1609 || (GET_MODE (orig_src) != mode
1610 && GET_MODE (orig_src) != VOIDmode)))
1612 if (GET_MODE (orig_src) == VOIDmode)
1613 src = gen_reg_rtx (mode);
1615 src = gen_reg_rtx (GET_MODE (orig_src));
1617 emit_move_insn (src, orig_src);
1620 /* Optimize the access just a bit. */
1622 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1623 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1624 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1625 && bytelen == GET_MODE_SIZE (mode))
1627 tmps[i] = gen_reg_rtx (mode);
1628 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1630 else if (GET_CODE (src) == CONCAT)
1632 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1633 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1635 if ((bytepos == 0 && bytelen == slen0)
1636 || (bytepos != 0 && bytepos + bytelen <= slen))
1638 /* The following assumes that the concatenated objects all
1639 have the same size. In this case, a simple calculation
1640 can be used to determine the object and the bit field
1642 tmps[i] = XEXP (src, bytepos / slen0);
1643 if (! CONSTANT_P (tmps[i])
1644 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1645 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1646 (bytepos % slen0) * BITS_PER_UNIT,
1647 1, NULL_RTX, mode, mode);
1649 else if (bytepos == 0)
1651 rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
1652 emit_move_insn (mem, src);
1653 tmps[i] = adjust_address (mem, mode, 0);
1658 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1659 SIMD register, which is currently broken. While we get GCC
1660 to emit proper RTL for these cases, let's dump to memory. */
1661 else if (VECTOR_MODE_P (GET_MODE (dst))
1664 int slen = GET_MODE_SIZE (GET_MODE (src));
1667 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1668 emit_move_insn (mem, src);
1669 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1671 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1672 && XVECLEN (dst, 0) > 1)
1673 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE(dst), bytepos);
1674 else if (CONSTANT_P (src)
1675 || (REG_P (src) && GET_MODE (src) == mode))
1678 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1679 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1683 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1684 build_int_2 (shift, 0), tmps[i], 0);
1687 /* Copy the extracted pieces into the proper (probable) hard regs. */
1688 for (i = start; i < XVECLEN (dst, 0); i++)
1689 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
1692 /* Emit code to move a block SRC to block DST, where SRC and DST are
1693 non-consecutive groups of registers, each represented by a PARALLEL. */
1696 emit_group_move (rtx dst, rtx src)
1700 if (GET_CODE (src) != PARALLEL
1701 || GET_CODE (dst) != PARALLEL
1702 || XVECLEN (src, 0) != XVECLEN (dst, 0))
1705 /* Skip first entry if NULL. */
1706 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1707 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1708 XEXP (XVECEXP (src, 0, i), 0));
1711 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1712 where SRC is non-consecutive registers represented by a PARALLEL.
1713 SSIZE represents the total size of block ORIG_DST, or -1 if not
1717 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1722 if (GET_CODE (src) != PARALLEL)
1725 /* Check for a NULL entry, used to indicate that the parameter goes
1726 both on the stack and in registers. */
1727 if (XEXP (XVECEXP (src, 0, 0), 0))
1732 tmps = alloca (sizeof (rtx) * XVECLEN (src, 0));
1734 /* Copy the (probable) hard regs into pseudos. */
1735 for (i = start; i < XVECLEN (src, 0); i++)
1737 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1738 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1739 emit_move_insn (tmps[i], reg);
1742 /* If we won't be storing directly into memory, protect the real destination
1743 from strange tricks we might play. */
1745 if (GET_CODE (dst) == PARALLEL)
1749 /* We can get a PARALLEL dst if there is a conditional expression in
1750 a return statement. In that case, the dst and src are the same,
1751 so no action is necessary. */
1752 if (rtx_equal_p (dst, src))
1755 /* It is unclear if we can ever reach here, but we may as well handle
1756 it. Allocate a temporary, and split this into a store/load to/from
1759 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
1760 emit_group_store (temp, src, type, ssize);
1761 emit_group_load (dst, temp, type, ssize);
1764 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1766 dst = gen_reg_rtx (GET_MODE (orig_dst));
1767 /* Make life a bit easier for combine. */
1768 emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
1771 /* Process the pieces. */
1772 for (i = start; i < XVECLEN (src, 0); i++)
1774 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
1775 enum machine_mode mode = GET_MODE (tmps[i]);
1776 unsigned int bytelen = GET_MODE_SIZE (mode);
1779 /* Handle trailing fragments that run over the size of the struct. */
1780 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1782 /* store_bit_field always takes its value from the lsb.
1783 Move the fragment to the lsb if it's not already there. */
1785 #ifdef BLOCK_REG_PADDING
1786 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
1787 == (BYTES_BIG_ENDIAN ? upward : downward)
1793 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1794 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
1795 build_int_2 (shift, 0), tmps[i], 0);
1797 bytelen = ssize - bytepos;
1800 if (GET_CODE (dst) == CONCAT)
1802 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
1803 dest = XEXP (dst, 0);
1804 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
1806 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
1807 dest = XEXP (dst, 1);
1809 else if (bytepos == 0 && XVECLEN (src, 0))
1811 dest = assign_stack_temp (GET_MODE (dest),
1812 GET_MODE_SIZE (GET_MODE (dest)), 0);
1813 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
1822 /* Optimize the access just a bit. */
1824 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
1825 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
1826 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1827 && bytelen == GET_MODE_SIZE (mode))
1828 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
1830 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
1834 /* Copy from the pseudo into the (probable) hard reg. */
1835 if (orig_dst != dst)
1836 emit_move_insn (orig_dst, dst);
1839 /* Generate code to copy a BLKmode object of TYPE out of a
1840 set of registers starting with SRCREG into TGTBLK. If TGTBLK
1841 is null, a stack temporary is created. TGTBLK is returned.
1843 The purpose of this routine is to handle functions that return
1844 BLKmode structures in registers. Some machines (the PA for example)
1845 want to return all small structures in registers regardless of the
1846 structure's alignment. */
1849 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
1851 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
1852 rtx src = NULL, dst = NULL;
1853 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
1854 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
1858 tgtblk = assign_temp (build_qualified_type (type,
1860 | TYPE_QUAL_CONST)),
1862 preserve_temp_slots (tgtblk);
1865 /* This code assumes srcreg is at least a full word. If it isn't, copy it
1866 into a new pseudo which is a full word. */
1868 if (GET_MODE (srcreg) != BLKmode
1869 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
1870 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
1872 /* If the structure doesn't take up a whole number of words, see whether
1873 SRCREG is padded on the left or on the right. If it's on the left,
1874 set PADDING_CORRECTION to the number of bits to skip.
1876 In most ABIs, the structure will be returned at the least end of
1877 the register, which translates to right padding on little-endian
1878 targets and left padding on big-endian targets. The opposite
1879 holds if the structure is returned at the most significant
1880 end of the register. */
1881 if (bytes % UNITS_PER_WORD != 0
1882 && (targetm.calls.return_in_msb (type)
1884 : BYTES_BIG_ENDIAN))
1886 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
1888 /* Copy the structure BITSIZE bites at a time.
1890 We could probably emit more efficient code for machines which do not use
1891 strict alignment, but it doesn't seem worth the effort at the current
1893 for (bitpos = 0, xbitpos = padding_correction;
1894 bitpos < bytes * BITS_PER_UNIT;
1895 bitpos += bitsize, xbitpos += bitsize)
1897 /* We need a new source operand each time xbitpos is on a
1898 word boundary and when xbitpos == padding_correction
1899 (the first time through). */
1900 if (xbitpos % BITS_PER_WORD == 0
1901 || xbitpos == padding_correction)
1902 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
1905 /* We need a new destination operand each time bitpos is on
1907 if (bitpos % BITS_PER_WORD == 0)
1908 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
1910 /* Use xbitpos for the source extraction (right justified) and
1911 xbitpos for the destination store (left justified). */
1912 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
1913 extract_bit_field (src, bitsize,
1914 xbitpos % BITS_PER_WORD, 1,
1915 NULL_RTX, word_mode, word_mode));
1921 /* Add a USE expression for REG to the (possibly empty) list pointed
1922 to by CALL_FUSAGE. REG must denote a hard register. */
1925 use_reg (rtx *call_fusage, rtx reg)
1928 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
1932 = gen_rtx_EXPR_LIST (VOIDmode,
1933 gen_rtx_USE (VOIDmode, reg), *call_fusage);
1936 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
1937 starting at REGNO. All of these registers must be hard registers. */
1940 use_regs (rtx *call_fusage, int regno, int nregs)
1944 if (regno + nregs > FIRST_PSEUDO_REGISTER)
1947 for (i = 0; i < nregs; i++)
1948 use_reg (call_fusage, regno_reg_rtx[regno + i]);
1951 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
1952 PARALLEL REGS. This is for calls that pass values in multiple
1953 non-contiguous locations. The Irix 6 ABI has examples of this. */
1956 use_group_regs (rtx *call_fusage, rtx regs)
1960 for (i = 0; i < XVECLEN (regs, 0); i++)
1962 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
1964 /* A NULL entry means the parameter goes both on the stack and in
1965 registers. This can also be a MEM for targets that pass values
1966 partially on the stack and partially in registers. */
1967 if (reg != 0 && REG_P (reg))
1968 use_reg (call_fusage, reg);
1973 /* Determine whether the LEN bytes generated by CONSTFUN can be
1974 stored to memory using several move instructions. CONSTFUNDATA is
1975 a pointer which will be passed as argument in every CONSTFUN call.
1976 ALIGN is maximum alignment we can assume. Return nonzero if a
1977 call to store_by_pieces should succeed. */
1980 can_store_by_pieces (unsigned HOST_WIDE_INT len,
1981 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
1982 void *constfundata, unsigned int align)
1984 unsigned HOST_WIDE_INT max_size, l;
1985 HOST_WIDE_INT offset = 0;
1986 enum machine_mode mode, tmode;
1987 enum insn_code icode;
1994 if (! STORE_BY_PIECES_P (len, align))
1997 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1998 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1999 align = MOVE_MAX * BITS_PER_UNIT;
2001 /* We would first store what we can in the largest integer mode, then go to
2002 successively smaller modes. */
2005 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2010 max_size = STORE_MAX_PIECES + 1;
2011 while (max_size > 1)
2013 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2014 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2015 if (GET_MODE_SIZE (tmode) < max_size)
2018 if (mode == VOIDmode)
2021 icode = mov_optab->handlers[(int) mode].insn_code;
2022 if (icode != CODE_FOR_nothing
2023 && align >= GET_MODE_ALIGNMENT (mode))
2025 unsigned int size = GET_MODE_SIZE (mode);
2032 cst = (*constfun) (constfundata, offset, mode);
2033 if (!LEGITIMATE_CONSTANT_P (cst))
2043 max_size = GET_MODE_SIZE (mode);
2046 /* The code above should have handled everything. */
2054 /* Generate several move instructions to store LEN bytes generated by
2055 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2056 pointer which will be passed as argument in every CONSTFUN call.
2057 ALIGN is maximum alignment we can assume.
2058 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
2059 mempcpy, and if ENDP is 2 return memory the end minus one byte ala
2063 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2064 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2065 void *constfundata, unsigned int align, int endp)
2067 struct store_by_pieces data;
2076 if (! STORE_BY_PIECES_P (len, align))
2078 data.constfun = constfun;
2079 data.constfundata = constfundata;
2082 store_by_pieces_1 (&data, align);
2093 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2094 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2096 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2099 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2106 to1 = adjust_address (data.to, QImode, data.offset);
2114 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2115 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2118 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2120 struct store_by_pieces data;
2125 data.constfun = clear_by_pieces_1;
2126 data.constfundata = NULL;
2129 store_by_pieces_1 (&data, align);
2132 /* Callback routine for clear_by_pieces.
2133 Return const0_rtx unconditionally. */
2136 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2137 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2138 enum machine_mode mode ATTRIBUTE_UNUSED)
2143 /* Subroutine of clear_by_pieces and store_by_pieces.
2144 Generate several move instructions to store LEN bytes of block TO. (A MEM
2145 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2148 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2149 unsigned int align ATTRIBUTE_UNUSED)
2151 rtx to_addr = XEXP (data->to, 0);
2152 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
2153 enum machine_mode mode = VOIDmode, tmode;
2154 enum insn_code icode;
2157 data->to_addr = to_addr;
2159 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2160 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2162 data->explicit_inc_to = 0;
2164 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2166 data->offset = data->len;
2168 /* If storing requires more than two move insns,
2169 copy addresses to registers (to make displacements shorter)
2170 and use post-increment if available. */
2171 if (!data->autinc_to
2172 && move_by_pieces_ninsns (data->len, align) > 2)
2174 /* Determine the main mode we'll be using. */
2175 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2176 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2177 if (GET_MODE_SIZE (tmode) < max_size)
2180 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2182 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2183 data->autinc_to = 1;
2184 data->explicit_inc_to = -1;
2187 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2188 && ! data->autinc_to)
2190 data->to_addr = copy_addr_to_reg (to_addr);
2191 data->autinc_to = 1;
2192 data->explicit_inc_to = 1;
2195 if ( !data->autinc_to && CONSTANT_P (to_addr))
2196 data->to_addr = copy_addr_to_reg (to_addr);
2199 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2200 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2201 align = MOVE_MAX * BITS_PER_UNIT;
2203 /* First store what we can in the largest integer mode, then go to
2204 successively smaller modes. */
2206 while (max_size > 1)
2208 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2209 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2210 if (GET_MODE_SIZE (tmode) < max_size)
2213 if (mode == VOIDmode)
2216 icode = mov_optab->handlers[(int) mode].insn_code;
2217 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2218 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2220 max_size = GET_MODE_SIZE (mode);
2223 /* The code above should have handled everything. */
2228 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2229 with move instructions for mode MODE. GENFUN is the gen_... function
2230 to make a move insn for that mode. DATA has all the other info. */
2233 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2234 struct store_by_pieces *data)
2236 unsigned int size = GET_MODE_SIZE (mode);
2239 while (data->len >= size)
2242 data->offset -= size;
2244 if (data->autinc_to)
2245 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2248 to1 = adjust_address (data->to, mode, data->offset);
2250 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2251 emit_insn (gen_add2_insn (data->to_addr,
2252 GEN_INT (-(HOST_WIDE_INT) size)));
2254 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2255 emit_insn ((*genfun) (to1, cst));
2257 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2258 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2260 if (! data->reverse)
2261 data->offset += size;
2267 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2268 its length in bytes. */
2271 clear_storage (rtx object, rtx size)
2274 unsigned int align = (MEM_P (object) ? MEM_ALIGN (object)
2275 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2277 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2278 just move a zero. Otherwise, do this a piece at a time. */
2279 if (GET_MODE (object) != BLKmode
2280 && GET_CODE (size) == CONST_INT
2281 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
2282 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2285 if (size == const0_rtx)
2287 else if (GET_CODE (size) == CONST_INT
2288 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2289 clear_by_pieces (object, INTVAL (size), align);
2290 else if (clear_storage_via_clrmem (object, size, align))
2293 retval = clear_storage_via_libcall (object, size);
2299 /* A subroutine of clear_storage. Expand a clrmem pattern;
2300 return true if successful. */
2303 clear_storage_via_clrmem (rtx object, rtx size, unsigned int align)
2305 /* Try the most limited insn first, because there's no point
2306 including more than one in the machine description unless
2307 the more limited one has some advantage. */
2309 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2310 enum machine_mode mode;
2312 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2313 mode = GET_MODE_WIDER_MODE (mode))
2315 enum insn_code code = clrmem_optab[(int) mode];
2316 insn_operand_predicate_fn pred;
2318 if (code != CODE_FOR_nothing
2319 /* We don't need MODE to be narrower than
2320 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2321 the mode mask, as it is returned by the macro, it will
2322 definitely be less than the actual mode mask. */
2323 && ((GET_CODE (size) == CONST_INT
2324 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2325 <= (GET_MODE_MASK (mode) >> 1)))
2326 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2327 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2328 || (*pred) (object, BLKmode))
2329 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2330 || (*pred) (opalign, VOIDmode)))
2333 rtx last = get_last_insn ();
2336 op1 = convert_to_mode (mode, size, 1);
2337 pred = insn_data[(int) code].operand[1].predicate;
2338 if (pred != 0 && ! (*pred) (op1, mode))
2339 op1 = copy_to_mode_reg (mode, op1);
2341 pat = GEN_FCN ((int) code) (object, op1, opalign);
2348 delete_insns_since (last);
2355 /* A subroutine of clear_storage. Expand a call to memset.
2356 Return the return value of memset, 0 otherwise. */
2359 clear_storage_via_libcall (rtx object, rtx size)
2361 tree call_expr, arg_list, fn, object_tree, size_tree;
2362 enum machine_mode size_mode;
2365 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2366 place those into new pseudos into a VAR_DECL and use them later. */
2368 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2370 size_mode = TYPE_MODE (sizetype);
2371 size = convert_to_mode (size_mode, size, 1);
2372 size = copy_to_mode_reg (size_mode, size);
2374 /* It is incorrect to use the libcall calling conventions to call
2375 memset in this context. This could be a user call to memset and
2376 the user may wish to examine the return value from memset. For
2377 targets where libcalls and normal calls have different conventions
2378 for returning pointers, we could end up generating incorrect code. */
2380 object_tree = make_tree (ptr_type_node, object);
2381 size_tree = make_tree (sizetype, size);
2383 fn = clear_storage_libcall_fn (true);
2384 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2385 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
2386 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2388 /* Now we have to build up the CALL_EXPR itself. */
2389 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2390 call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2391 call_expr, arg_list, NULL_TREE);
2393 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2395 /* If we are initializing a readonly value, show the above call
2396 clobbered it. Otherwise, a load from it may erroneously be
2397 hoisted from a loop. */
2398 if (RTX_UNCHANGING_P (object))
2399 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
2404 /* A subroutine of clear_storage_via_libcall. Create the tree node
2405 for the function we use for block clears. The first time FOR_CALL
2406 is true, we call assemble_external. */
2408 static GTY(()) tree block_clear_fn;
2411 init_block_clear_fn (const char *asmspec)
2413 if (!block_clear_fn)
2417 fn = get_identifier ("memset");
2418 args = build_function_type_list (ptr_type_node, ptr_type_node,
2419 integer_type_node, sizetype,
2422 fn = build_decl (FUNCTION_DECL, fn, args);
2423 DECL_EXTERNAL (fn) = 1;
2424 TREE_PUBLIC (fn) = 1;
2425 DECL_ARTIFICIAL (fn) = 1;
2426 TREE_NOTHROW (fn) = 1;
2428 block_clear_fn = fn;
2432 set_user_assembler_name (block_clear_fn, asmspec);
2436 clear_storage_libcall_fn (int for_call)
2438 static bool emitted_extern;
2440 if (!block_clear_fn)
2441 init_block_clear_fn (NULL);
2443 if (for_call && !emitted_extern)
2445 emitted_extern = true;
2446 make_decl_rtl (block_clear_fn);
2447 assemble_external (block_clear_fn);
2450 return block_clear_fn;
2453 /* Generate code to copy Y into X.
2454 Both Y and X must have the same mode, except that
2455 Y can be a constant with VOIDmode.
2456 This mode cannot be BLKmode; use emit_block_move for that.
2458 Return the last instruction emitted. */
2461 emit_move_insn (rtx x, rtx y)
2463 enum machine_mode mode = GET_MODE (x);
2464 rtx y_cst = NULL_RTX;
2467 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2473 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
2474 && (last_insn = compress_float_constant (x, y)))
2479 if (!LEGITIMATE_CONSTANT_P (y))
2481 y = force_const_mem (mode, y);
2483 /* If the target's cannot_force_const_mem prevented the spill,
2484 assume that the target's move expanders will also take care
2485 of the non-legitimate constant. */
2491 /* If X or Y are memory references, verify that their addresses are valid
2494 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2495 && ! push_operand (x, GET_MODE (x)))
2497 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2498 x = validize_mem (x);
2501 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2503 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2504 y = validize_mem (y);
2506 if (mode == BLKmode)
2509 last_insn = emit_move_insn_1 (x, y);
2511 if (y_cst && REG_P (x)
2512 && (set = single_set (last_insn)) != NULL_RTX
2513 && SET_DEST (set) == x
2514 && ! rtx_equal_p (y_cst, SET_SRC (set)))
2515 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
2520 /* Low level part of emit_move_insn.
2521 Called just like emit_move_insn, but assumes X and Y
2522 are basically valid. */
2525 emit_move_insn_1 (rtx x, rtx y)
2527 enum machine_mode mode = GET_MODE (x);
2528 enum machine_mode submode;
2529 enum mode_class class = GET_MODE_CLASS (mode);
2531 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2534 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2536 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2538 /* Expand complex moves by moving real part and imag part, if possible. */
2539 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2540 && BLKmode != (submode = GET_MODE_INNER (mode))
2541 && (mov_optab->handlers[(int) submode].insn_code
2542 != CODE_FOR_nothing))
2544 /* Don't split destination if it is a stack push. */
2545 int stack = push_operand (x, GET_MODE (x));
2547 #ifdef PUSH_ROUNDING
2548 /* In case we output to the stack, but the size is smaller than the
2549 machine can push exactly, we need to use move instructions. */
2551 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
2552 != GET_MODE_SIZE (submode)))
2555 HOST_WIDE_INT offset1, offset2;
2557 /* Do not use anti_adjust_stack, since we don't want to update
2558 stack_pointer_delta. */
2559 temp = expand_binop (Pmode,
2560 #ifdef STACK_GROWS_DOWNWARD
2568 (GET_MODE_SIZE (GET_MODE (x)))),
2569 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2571 if (temp != stack_pointer_rtx)
2572 emit_move_insn (stack_pointer_rtx, temp);
2574 #ifdef STACK_GROWS_DOWNWARD
2576 offset2 = GET_MODE_SIZE (submode);
2578 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2579 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2580 + GET_MODE_SIZE (submode));
2583 emit_move_insn (change_address (x, submode,
2584 gen_rtx_PLUS (Pmode,
2586 GEN_INT (offset1))),
2587 gen_realpart (submode, y));
2588 emit_move_insn (change_address (x, submode,
2589 gen_rtx_PLUS (Pmode,
2591 GEN_INT (offset2))),
2592 gen_imagpart (submode, y));
2596 /* If this is a stack, push the highpart first, so it
2597 will be in the argument order.
2599 In that case, change_address is used only to convert
2600 the mode, not to change the address. */
2603 /* Note that the real part always precedes the imag part in memory
2604 regardless of machine's endianness. */
2605 #ifdef STACK_GROWS_DOWNWARD
2606 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2607 gen_imagpart (submode, y));
2608 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2609 gen_realpart (submode, y));
2611 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2612 gen_realpart (submode, y));
2613 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2614 gen_imagpart (submode, y));
2619 rtx realpart_x, realpart_y;
2620 rtx imagpart_x, imagpart_y;
2622 /* If this is a complex value with each part being smaller than a
2623 word, the usual calling sequence will likely pack the pieces into
2624 a single register. Unfortunately, SUBREG of hard registers only
2625 deals in terms of words, so we have a problem converting input
2626 arguments to the CONCAT of two registers that is used elsewhere
2627 for complex values. If this is before reload, we can copy it into
2628 memory and reload. FIXME, we should see about using extract and
2629 insert on integer registers, but complex short and complex char
2630 variables should be rarely used. */
2631 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2632 && (reload_in_progress | reload_completed) == 0)
2635 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2637 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2639 if (packed_dest_p || packed_src_p)
2641 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2642 ? MODE_FLOAT : MODE_INT);
2644 enum machine_mode reg_mode
2645 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2647 if (reg_mode != BLKmode)
2649 rtx mem = assign_stack_temp (reg_mode,
2650 GET_MODE_SIZE (mode), 0);
2651 rtx cmem = adjust_address (mem, mode, 0);
2655 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2657 emit_move_insn_1 (cmem, y);
2658 return emit_move_insn_1 (sreg, mem);
2662 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2664 emit_move_insn_1 (mem, sreg);
2665 return emit_move_insn_1 (x, cmem);
2671 realpart_x = gen_realpart (submode, x);
2672 realpart_y = gen_realpart (submode, y);
2673 imagpart_x = gen_imagpart (submode, x);
2674 imagpart_y = gen_imagpart (submode, y);
2676 /* Show the output dies here. This is necessary for SUBREGs
2677 of pseudos since we cannot track their lifetimes correctly;
2678 hard regs shouldn't appear here except as return values.
2679 We never want to emit such a clobber after reload. */
2681 && ! (reload_in_progress || reload_completed)
2682 && (GET_CODE (realpart_x) == SUBREG
2683 || GET_CODE (imagpart_x) == SUBREG))
2684 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2686 emit_move_insn (realpart_x, realpart_y);
2687 emit_move_insn (imagpart_x, imagpart_y);
2690 return get_last_insn ();
2693 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
2694 find a mode to do it in. If we have a movcc, use it. Otherwise,
2695 find the MODE_INT mode of the same width. */
2696 else if (GET_MODE_CLASS (mode) == MODE_CC
2697 && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
2699 enum insn_code insn_code;
2700 enum machine_mode tmode = VOIDmode;
2704 && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
2707 for (tmode = QImode; tmode != VOIDmode;
2708 tmode = GET_MODE_WIDER_MODE (tmode))
2709 if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
2712 if (tmode == VOIDmode)
2715 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
2716 may call change_address which is not appropriate if we were
2717 called when a reload was in progress. We don't have to worry
2718 about changing the address since the size in bytes is supposed to
2719 be the same. Copy the MEM to change the mode and move any
2720 substitutions from the old MEM to the new one. */
2722 if (reload_in_progress)
2724 x = gen_lowpart_common (tmode, x1);
2725 if (x == 0 && MEM_P (x1))
2727 x = adjust_address_nv (x1, tmode, 0);
2728 copy_replacements (x1, x);
2731 y = gen_lowpart_common (tmode, y1);
2732 if (y == 0 && MEM_P (y1))
2734 y = adjust_address_nv (y1, tmode, 0);
2735 copy_replacements (y1, y);
2740 x = gen_lowpart (tmode, x);
2741 y = gen_lowpart (tmode, y);
2744 insn_code = mov_optab->handlers[(int) tmode].insn_code;
2745 return emit_insn (GEN_FCN (insn_code) (x, y));
2748 /* Try using a move pattern for the corresponding integer mode. This is
2749 only safe when simplify_subreg can convert MODE constants into integer
2750 constants. At present, it can only do this reliably if the value
2751 fits within a HOST_WIDE_INT. */
2752 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
2753 && (submode = int_mode_for_mode (mode)) != BLKmode
2754 && mov_optab->handlers[submode].insn_code != CODE_FOR_nothing)
2755 return emit_insn (GEN_FCN (mov_optab->handlers[submode].insn_code)
2756 (simplify_gen_subreg (submode, x, mode, 0),
2757 simplify_gen_subreg (submode, y, mode, 0)));
2759 /* This will handle any multi-word or full-word mode that lacks a move_insn
2760 pattern. However, you will get better code if you define such patterns,
2761 even if they must turn into multiple assembler instructions. */
2762 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
2769 #ifdef PUSH_ROUNDING
2771 /* If X is a push on the stack, do the push now and replace
2772 X with a reference to the stack pointer. */
2773 if (push_operand (x, GET_MODE (x)))
2778 /* Do not use anti_adjust_stack, since we don't want to update
2779 stack_pointer_delta. */
2780 temp = expand_binop (Pmode,
2781 #ifdef STACK_GROWS_DOWNWARD
2789 (GET_MODE_SIZE (GET_MODE (x)))),
2790 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2792 if (temp != stack_pointer_rtx)
2793 emit_move_insn (stack_pointer_rtx, temp);
2795 code = GET_CODE (XEXP (x, 0));
2797 /* Just hope that small offsets off SP are OK. */
2798 if (code == POST_INC)
2799 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
2800 GEN_INT (-((HOST_WIDE_INT)
2801 GET_MODE_SIZE (GET_MODE (x)))));
2802 else if (code == POST_DEC)
2803 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
2804 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2806 temp = stack_pointer_rtx;
2808 x = change_address (x, VOIDmode, temp);
2812 /* If we are in reload, see if either operand is a MEM whose address
2813 is scheduled for replacement. */
2814 if (reload_in_progress && MEM_P (x)
2815 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
2816 x = replace_equiv_address_nv (x, inner);
2817 if (reload_in_progress && MEM_P (y)
2818 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
2819 y = replace_equiv_address_nv (y, inner);
2825 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2828 rtx xpart = operand_subword (x, i, 1, mode);
2829 rtx ypart = operand_subword (y, i, 1, mode);
2831 /* If we can't get a part of Y, put Y into memory if it is a
2832 constant. Otherwise, force it into a register. If we still
2833 can't get a part of Y, abort. */
2834 if (ypart == 0 && CONSTANT_P (y))
2836 y = force_const_mem (mode, y);
2837 ypart = operand_subword (y, i, 1, mode);
2839 else if (ypart == 0)
2840 ypart = operand_subword_force (y, i, mode);
2842 if (xpart == 0 || ypart == 0)
2845 need_clobber |= (GET_CODE (xpart) == SUBREG);
2847 last_insn = emit_move_insn (xpart, ypart);
2853 /* Show the output dies here. This is necessary for SUBREGs
2854 of pseudos since we cannot track their lifetimes correctly;
2855 hard regs shouldn't appear here except as return values.
2856 We never want to emit such a clobber after reload. */
2858 && ! (reload_in_progress || reload_completed)
2859 && need_clobber != 0)
2860 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2870 /* If Y is representable exactly in a narrower mode, and the target can
2871 perform the extension directly from constant or memory, then emit the
2872 move as an extension. */
2875 compress_float_constant (rtx x, rtx y)
2877 enum machine_mode dstmode = GET_MODE (x);
2878 enum machine_mode orig_srcmode = GET_MODE (y);
2879 enum machine_mode srcmode;
2882 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
2884 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
2885 srcmode != orig_srcmode;
2886 srcmode = GET_MODE_WIDER_MODE (srcmode))
2889 rtx trunc_y, last_insn;
2891 /* Skip if the target can't extend this way. */
2892 ic = can_extend_p (dstmode, srcmode, 0);
2893 if (ic == CODE_FOR_nothing)
2896 /* Skip if the narrowed value isn't exact. */
2897 if (! exact_real_truncate (srcmode, &r))
2900 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
2902 if (LEGITIMATE_CONSTANT_P (trunc_y))
2904 /* Skip if the target needs extra instructions to perform
2906 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
2909 else if (float_extend_from_mem[dstmode][srcmode])
2910 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
2914 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
2915 last_insn = get_last_insn ();
2918 set_unique_reg_note (last_insn, REG_EQUAL, y);
2926 /* Pushing data onto the stack. */
2928 /* Push a block of length SIZE (perhaps variable)
2929 and return an rtx to address the beginning of the block.
2930 The value may be virtual_outgoing_args_rtx.
2932 EXTRA is the number of bytes of padding to push in addition to SIZE.
2933 BELOW nonzero means this padding comes at low addresses;
2934 otherwise, the padding comes at high addresses. */
2937 push_block (rtx size, int extra, int below)
2941 size = convert_modes (Pmode, ptr_mode, size, 1);
2942 if (CONSTANT_P (size))
2943 anti_adjust_stack (plus_constant (size, extra));
2944 else if (REG_P (size) && extra == 0)
2945 anti_adjust_stack (size);
2948 temp = copy_to_mode_reg (Pmode, size);
2950 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2951 temp, 0, OPTAB_LIB_WIDEN);
2952 anti_adjust_stack (temp);
2955 #ifndef STACK_GROWS_DOWNWARD
2961 temp = virtual_outgoing_args_rtx;
2962 if (extra != 0 && below)
2963 temp = plus_constant (temp, extra);
2967 if (GET_CODE (size) == CONST_INT)
2968 temp = plus_constant (virtual_outgoing_args_rtx,
2969 -INTVAL (size) - (below ? 0 : extra));
2970 else if (extra != 0 && !below)
2971 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2972 negate_rtx (Pmode, plus_constant (size, extra)));
2974 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2975 negate_rtx (Pmode, size));
2978 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2981 #ifdef PUSH_ROUNDING
2983 /* Emit single push insn. */
2986 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
2989 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
2991 enum insn_code icode;
2992 insn_operand_predicate_fn pred;
2994 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
2995 /* If there is push pattern, use it. Otherwise try old way of throwing
2996 MEM representing push operation to move expander. */
2997 icode = push_optab->handlers[(int) mode].insn_code;
2998 if (icode != CODE_FOR_nothing)
3000 if (((pred = insn_data[(int) icode].operand[0].predicate)
3001 && !((*pred) (x, mode))))
3002 x = force_reg (mode, x);
3003 emit_insn (GEN_FCN (icode) (x));
3006 if (GET_MODE_SIZE (mode) == rounded_size)
3007 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3008 /* If we are to pad downward, adjust the stack pointer first and
3009 then store X into the stack location using an offset. This is
3010 because emit_move_insn does not know how to pad; it does not have
3012 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3014 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3015 HOST_WIDE_INT offset;
3017 emit_move_insn (stack_pointer_rtx,
3018 expand_binop (Pmode,
3019 #ifdef STACK_GROWS_DOWNWARD
3025 GEN_INT (rounded_size),
3026 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3028 offset = (HOST_WIDE_INT) padding_size;
3029 #ifdef STACK_GROWS_DOWNWARD
3030 if (STACK_PUSH_CODE == POST_DEC)
3031 /* We have already decremented the stack pointer, so get the
3033 offset += (HOST_WIDE_INT) rounded_size;
3035 if (STACK_PUSH_CODE == POST_INC)
3036 /* We have already incremented the stack pointer, so get the
3038 offset -= (HOST_WIDE_INT) rounded_size;
3040 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3044 #ifdef STACK_GROWS_DOWNWARD
3045 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3046 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3047 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3049 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3050 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3051 GEN_INT (rounded_size));
3053 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3056 dest = gen_rtx_MEM (mode, dest_addr);
3060 set_mem_attributes (dest, type, 1);
3062 if (flag_optimize_sibling_calls)
3063 /* Function incoming arguments may overlap with sibling call
3064 outgoing arguments and we cannot allow reordering of reads
3065 from function arguments with stores to outgoing arguments
3066 of sibling calls. */
3067 set_mem_alias_set (dest, 0);
3069 emit_move_insn (dest, x);
3073 /* Generate code to push X onto the stack, assuming it has mode MODE and
3075 MODE is redundant except when X is a CONST_INT (since they don't
3077 SIZE is an rtx for the size of data to be copied (in bytes),
3078 needed only if X is BLKmode.
3080 ALIGN (in bits) is maximum alignment we can assume.
3082 If PARTIAL and REG are both nonzero, then copy that many of the first
3083 words of X into registers starting with REG, and push the rest of X.
3084 The amount of space pushed is decreased by PARTIAL words,
3085 rounded *down* to a multiple of PARM_BOUNDARY.
3086 REG must be a hard register in this case.
3087 If REG is zero but PARTIAL is not, take any all others actions for an
3088 argument partially in registers, but do not actually load any
3091 EXTRA is the amount in bytes of extra space to leave next to this arg.
3092 This is ignored if an argument block has already been allocated.
3094 On a machine that lacks real push insns, ARGS_ADDR is the address of
3095 the bottom of the argument block for this call. We use indexing off there
3096 to store the arg. On machines with push insns, ARGS_ADDR is 0 when a
3097 argument block has not been preallocated.
3099 ARGS_SO_FAR is the size of args previously pushed for this call.
3101 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3102 for arguments passed in registers. If nonzero, it will be the number
3103 of bytes required. */
3106 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3107 unsigned int align, int partial, rtx reg, int extra,
3108 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3112 enum direction stack_direction
3113 #ifdef STACK_GROWS_DOWNWARD
3119 /* Decide where to pad the argument: `downward' for below,
3120 `upward' for above, or `none' for don't pad it.
3121 Default is below for small data on big-endian machines; else above. */
3122 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3124 /* Invert direction if stack is post-decrement.
3126 if (STACK_PUSH_CODE == POST_DEC)
3127 if (where_pad != none)
3128 where_pad = (where_pad == downward ? upward : downward);
3132 if (mode == BLKmode)
3134 /* Copy a block into the stack, entirely or partially. */
3137 int used = partial * UNITS_PER_WORD;
3141 if (reg && GET_CODE (reg) == PARALLEL)
3143 /* Use the size of the elt to compute offset. */
3144 rtx elt = XEXP (XVECEXP (reg, 0, 0), 0);
3145 used = partial * GET_MODE_SIZE (GET_MODE (elt));
3146 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3149 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3156 /* USED is now the # of bytes we need not copy to the stack
3157 because registers will take care of them. */
3160 xinner = adjust_address (xinner, BLKmode, used);
3162 /* If the partial register-part of the arg counts in its stack size,
3163 skip the part of stack space corresponding to the registers.
3164 Otherwise, start copying to the beginning of the stack space,
3165 by setting SKIP to 0. */
3166 skip = (reg_parm_stack_space == 0) ? 0 : used;
3168 #ifdef PUSH_ROUNDING
3169 /* Do it with several push insns if that doesn't take lots of insns
3170 and if there is no difficulty with push insns that skip bytes
3171 on the stack for alignment purposes. */
3174 && GET_CODE (size) == CONST_INT
3176 && MEM_ALIGN (xinner) >= align
3177 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3178 /* Here we avoid the case of a structure whose weak alignment
3179 forces many pushes of a small amount of data,
3180 and such small pushes do rounding that causes trouble. */
3181 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3182 || align >= BIGGEST_ALIGNMENT
3183 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3184 == (align / BITS_PER_UNIT)))
3185 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3187 /* Push padding now if padding above and stack grows down,
3188 or if padding below and stack grows up.
3189 But if space already allocated, this has already been done. */
3190 if (extra && args_addr == 0
3191 && where_pad != none && where_pad != stack_direction)
3192 anti_adjust_stack (GEN_INT (extra));
3194 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3197 #endif /* PUSH_ROUNDING */
3201 /* Otherwise make space on the stack and copy the data
3202 to the address of that space. */
3204 /* Deduct words put into registers from the size we must copy. */
3207 if (GET_CODE (size) == CONST_INT)
3208 size = GEN_INT (INTVAL (size) - used);
3210 size = expand_binop (GET_MODE (size), sub_optab, size,
3211 GEN_INT (used), NULL_RTX, 0,
3215 /* Get the address of the stack space.
3216 In this case, we do not deal with EXTRA separately.
3217 A single stack adjust will do. */
3220 temp = push_block (size, extra, where_pad == downward);
3223 else if (GET_CODE (args_so_far) == CONST_INT)
3224 temp = memory_address (BLKmode,
3225 plus_constant (args_addr,
3226 skip + INTVAL (args_so_far)));
3228 temp = memory_address (BLKmode,
3229 plus_constant (gen_rtx_PLUS (Pmode,
3234 if (!ACCUMULATE_OUTGOING_ARGS)
3236 /* If the source is referenced relative to the stack pointer,
3237 copy it to another register to stabilize it. We do not need
3238 to do this if we know that we won't be changing sp. */
3240 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3241 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3242 temp = copy_to_reg (temp);
3245 target = gen_rtx_MEM (BLKmode, temp);
3247 /* We do *not* set_mem_attributes here, because incoming arguments
3248 may overlap with sibling call outgoing arguments and we cannot
3249 allow reordering of reads from function arguments with stores
3250 to outgoing arguments of sibling calls. We do, however, want
3251 to record the alignment of the stack slot. */
3252 /* ALIGN may well be better aligned than TYPE, e.g. due to
3253 PARM_BOUNDARY. Assume the caller isn't lying. */
3254 set_mem_align (target, align);
3256 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3259 else if (partial > 0)
3261 /* Scalar partly in registers. */
3263 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3266 /* # words of start of argument
3267 that we must make space for but need not store. */
3268 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3269 int args_offset = INTVAL (args_so_far);
3272 /* Push padding now if padding above and stack grows down,
3273 or if padding below and stack grows up.
3274 But if space already allocated, this has already been done. */
3275 if (extra && args_addr == 0
3276 && where_pad != none && where_pad != stack_direction)
3277 anti_adjust_stack (GEN_INT (extra));
3279 /* If we make space by pushing it, we might as well push
3280 the real data. Otherwise, we can leave OFFSET nonzero
3281 and leave the space uninitialized. */
3285 /* Now NOT_STACK gets the number of words that we don't need to
3286 allocate on the stack. */
3287 not_stack = partial - offset;
3289 /* If the partial register-part of the arg counts in its stack size,
3290 skip the part of stack space corresponding to the registers.
3291 Otherwise, start copying to the beginning of the stack space,
3292 by setting SKIP to 0. */
3293 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3295 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3296 x = validize_mem (force_const_mem (mode, x));
3298 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3299 SUBREGs of such registers are not allowed. */
3300 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3301 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3302 x = copy_to_reg (x);
3304 /* Loop over all the words allocated on the stack for this arg. */
3305 /* We can do it by words, because any scalar bigger than a word
3306 has a size a multiple of a word. */
3307 #ifndef PUSH_ARGS_REVERSED
3308 for (i = not_stack; i < size; i++)
3310 for (i = size - 1; i >= not_stack; i--)
3312 if (i >= not_stack + offset)
3313 emit_push_insn (operand_subword_force (x, i, mode),
3314 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3316 GEN_INT (args_offset + ((i - not_stack + skip)
3318 reg_parm_stack_space, alignment_pad);
3325 /* Push padding now if padding above and stack grows down,
3326 or if padding below and stack grows up.
3327 But if space already allocated, this has already been done. */
3328 if (extra && args_addr == 0
3329 && where_pad != none && where_pad != stack_direction)
3330 anti_adjust_stack (GEN_INT (extra));
3332 #ifdef PUSH_ROUNDING
3333 if (args_addr == 0 && PUSH_ARGS)
3334 emit_single_push_insn (mode, x, type);
3338 if (GET_CODE (args_so_far) == CONST_INT)
3340 = memory_address (mode,
3341 plus_constant (args_addr,
3342 INTVAL (args_so_far)));
3344 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3346 dest = gen_rtx_MEM (mode, addr);
3348 /* We do *not* set_mem_attributes here, because incoming arguments
3349 may overlap with sibling call outgoing arguments and we cannot
3350 allow reordering of reads from function arguments with stores
3351 to outgoing arguments of sibling calls. We do, however, want
3352 to record the alignment of the stack slot. */
3353 /* ALIGN may well be better aligned than TYPE, e.g. due to
3354 PARM_BOUNDARY. Assume the caller isn't lying. */
3355 set_mem_align (dest, align);
3357 emit_move_insn (dest, x);
3361 /* If part should go in registers, copy that part
3362 into the appropriate registers. Do this now, at the end,
3363 since mem-to-mem copies above may do function calls. */
3364 if (partial > 0 && reg != 0)
3366 /* Handle calls that pass values in multiple non-contiguous locations.
3367 The Irix 6 ABI has examples of this. */
3368 if (GET_CODE (reg) == PARALLEL)
3369 emit_group_load (reg, x, type, -1);
3371 move_block_to_reg (REGNO (reg), x, partial, mode);
3374 if (extra && args_addr == 0 && where_pad == stack_direction)
3375 anti_adjust_stack (GEN_INT (extra));
3377 if (alignment_pad && args_addr == 0)
3378 anti_adjust_stack (alignment_pad);
3381 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3385 get_subtarget (rtx x)
3388 /* Only registers can be subtargets. */
3390 /* If the register is readonly, it can't be set more than once. */
3391 || RTX_UNCHANGING_P (x)
3392 /* Don't use hard regs to avoid extending their life. */
3393 || REGNO (x) < FIRST_PSEUDO_REGISTER
3394 /* Avoid subtargets inside loops,
3395 since they hide some invariant expressions. */
3396 || preserve_subexpressions_p ())
3400 /* Expand an assignment that stores the value of FROM into TO.
3401 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3402 (If the value is constant, this rtx is a constant.)
3403 Otherwise, the returned value is NULL_RTX. */
3406 expand_assignment (tree to, tree from, int want_value)
3411 /* Don't crash if the lhs of the assignment was erroneous. */
3413 if (TREE_CODE (to) == ERROR_MARK)
3415 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3416 return want_value ? result : NULL_RTX;
3419 /* Assignment of a structure component needs special treatment
3420 if the structure component's rtx is not simply a MEM.
3421 Assignment of an array element at a constant index, and assignment of
3422 an array element in an unaligned packed structure field, has the same
3425 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3426 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
3427 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
3429 enum machine_mode mode1;
3430 HOST_WIDE_INT bitsize, bitpos;
3438 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3439 &unsignedp, &volatilep);
3441 /* If we are going to use store_bit_field and extract_bit_field,
3442 make sure to_rtx will be safe for multiple use. */
3444 if (mode1 == VOIDmode && want_value)
3445 tem = stabilize_reference (tem);
3447 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3451 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
3453 if (!MEM_P (to_rtx))
3456 #ifdef POINTERS_EXTEND_UNSIGNED
3457 if (GET_MODE (offset_rtx) != Pmode)
3458 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
3460 if (GET_MODE (offset_rtx) != ptr_mode)
3461 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3464 /* A constant address in TO_RTX can have VOIDmode, we must not try
3465 to call force_reg for that case. Avoid that case. */
3467 && GET_MODE (to_rtx) == BLKmode
3468 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3470 && (bitpos % bitsize) == 0
3471 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3472 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3474 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3478 to_rtx = offset_address (to_rtx, offset_rtx,
3479 highest_pow2_factor_for_target (to,
3485 /* If the field is at offset zero, we could have been given the
3486 DECL_RTX of the parent struct. Don't munge it. */
3487 to_rtx = shallow_copy_rtx (to_rtx);
3489 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
3492 /* Deal with volatile and readonly fields. The former is only done
3493 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3494 if (volatilep && MEM_P (to_rtx))
3496 if (to_rtx == orig_to_rtx)
3497 to_rtx = copy_rtx (to_rtx);
3498 MEM_VOLATILE_P (to_rtx) = 1;
3501 if (TREE_CODE (to) == COMPONENT_REF
3502 && TREE_READONLY (TREE_OPERAND (to, 1))
3503 /* We can't assert that a MEM won't be set more than once
3504 if the component is not addressable because another
3505 non-addressable component may be referenced by the same MEM. */
3506 && ! (MEM_P (to_rtx) && ! can_address_p (to)))
3508 if (to_rtx == orig_to_rtx)
3509 to_rtx = copy_rtx (to_rtx);
3510 RTX_UNCHANGING_P (to_rtx) = 1;
3513 if (MEM_P (to_rtx) && ! can_address_p (to))
3515 if (to_rtx == orig_to_rtx)
3516 to_rtx = copy_rtx (to_rtx);
3517 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3520 /* Optimize bitfld op= val in certain cases. */
3521 while (mode1 == VOIDmode && !want_value
3522 && bitsize > 0 && bitsize < BITS_PER_WORD
3523 && GET_MODE_BITSIZE (GET_MODE (to_rtx)) <= BITS_PER_WORD
3524 && !TREE_SIDE_EFFECTS (to)
3525 && !TREE_THIS_VOLATILE (to))
3528 rtx value, str_rtx = to_rtx;
3529 HOST_WIDE_INT bitpos1 = bitpos;
3534 if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE
3535 || TREE_CODE_CLASS (TREE_CODE (src)) != '2')
3538 op0 = TREE_OPERAND (src, 0);
3539 op1 = TREE_OPERAND (src, 1);
3542 if (! operand_equal_p (to, op0, 0))
3545 if (MEM_P (str_rtx))
3547 enum machine_mode mode = GET_MODE (str_rtx);
3548 HOST_WIDE_INT offset1;
3550 if (GET_MODE_BITSIZE (mode) == 0
3551 || GET_MODE_BITSIZE (mode) > BITS_PER_WORD)
3553 mode = get_best_mode (bitsize, bitpos1, MEM_ALIGN (str_rtx),
3555 if (mode == VOIDmode)
3559 bitpos1 %= GET_MODE_BITSIZE (mode);
3560 offset1 = (offset1 - bitpos1) / BITS_PER_UNIT;
3561 str_rtx = adjust_address (str_rtx, mode, offset1);
3563 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
3566 /* If the bit field covers the whole REG/MEM, store_field
3567 will likely generate better code. */
3568 if (bitsize >= GET_MODE_BITSIZE (GET_MODE (str_rtx)))
3571 /* We can't handle fields split accross multiple entities. */
3572 if (bitpos1 + bitsize > GET_MODE_BITSIZE (GET_MODE (str_rtx)))
3575 if (BYTES_BIG_ENDIAN)
3576 bitpos1 = GET_MODE_BITSIZE (GET_MODE (str_rtx)) - bitpos1
3579 /* Special case some bitfield op= exp. */
3580 switch (TREE_CODE (src))
3584 /* For now, just optimize the case of the topmost bitfield
3585 where we don't need to do any masking and also
3586 1 bit bitfields where xor can be used.
3587 We might win by one instruction for the other bitfields
3588 too if insv/extv instructions aren't used, so that
3589 can be added later. */
3590 if (bitpos1 + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx))
3591 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
3593 value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), 0);
3594 value = convert_modes (GET_MODE (str_rtx),
3595 TYPE_MODE (TREE_TYPE (op1)), value,
3596 TYPE_UNSIGNED (TREE_TYPE (op1)));
3598 /* We may be accessing data outside the field, which means
3599 we can alias adjacent data. */
3600 if (MEM_P (str_rtx))
3602 str_rtx = shallow_copy_rtx (str_rtx);
3603 set_mem_alias_set (str_rtx, 0);
3604 set_mem_expr (str_rtx, 0);
3607 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
3609 && bitpos1 + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
3611 value = expand_and (GET_MODE (str_rtx), value, const1_rtx,
3615 value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx),
3616 value, build_int_2 (bitpos1, 0),
3618 result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
3619 value, str_rtx, 1, OPTAB_WIDEN);
3620 if (result != str_rtx)
3621 emit_move_insn (str_rtx, result);
3633 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3635 /* Spurious cast for HPUX compiler. */
3636 ? ((enum machine_mode)
3637 TYPE_MODE (TREE_TYPE (to)))
3639 unsignedp, TREE_TYPE (tem), get_alias_set (to));
3641 preserve_temp_slots (result);
3645 /* If the value is meaningful, convert RESULT to the proper mode.
3646 Otherwise, return nothing. */
3647 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3648 TYPE_MODE (TREE_TYPE (from)),
3650 TYPE_UNSIGNED (TREE_TYPE (to)))
3654 /* If the rhs is a function call and its value is not an aggregate,
3655 call the function before we start to compute the lhs.
3656 This is needed for correct code for cases such as
3657 val = setjmp (buf) on machines where reference to val
3658 requires loading up part of an address in a separate insn.
3660 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3661 since it might be a promoted variable where the zero- or sign- extension
3662 needs to be done. Handling this in the normal way is safe because no
3663 computation is done before the call. */
3664 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
3665 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3666 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3667 && REG_P (DECL_RTL (to))))
3672 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3674 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3676 /* Handle calls that return values in multiple non-contiguous locations.
3677 The Irix 6 ABI has examples of this. */
3678 if (GET_CODE (to_rtx) == PARALLEL)
3679 emit_group_load (to_rtx, value, TREE_TYPE (from),
3680 int_size_in_bytes (TREE_TYPE (from)));
3681 else if (GET_MODE (to_rtx) == BLKmode)
3682 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
3685 if (POINTER_TYPE_P (TREE_TYPE (to)))
3686 value = convert_memory_address (GET_MODE (to_rtx), value);
3687 emit_move_insn (to_rtx, value);
3689 preserve_temp_slots (to_rtx);
3692 return want_value ? to_rtx : NULL_RTX;
3695 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3696 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3699 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3701 /* Don't move directly into a return register. */
3702 if (TREE_CODE (to) == RESULT_DECL
3703 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
3708 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3710 if (GET_CODE (to_rtx) == PARALLEL)
3711 emit_group_load (to_rtx, temp, TREE_TYPE (from),
3712 int_size_in_bytes (TREE_TYPE (from)));
3714 emit_move_insn (to_rtx, temp);
3716 preserve_temp_slots (to_rtx);
3719 return want_value ? to_rtx : NULL_RTX;
3722 /* In case we are returning the contents of an object which overlaps
3723 the place the value is being stored, use a safe function when copying
3724 a value through a pointer into a structure value return block. */
3725 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3726 && current_function_returns_struct
3727 && !current_function_returns_pcc_struct)
3732 size = expr_size (from);
3733 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
3735 emit_library_call (memmove_libfunc, LCT_NORMAL,
3736 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3737 XEXP (from_rtx, 0), Pmode,
3738 convert_to_mode (TYPE_MODE (sizetype),
3739 size, TYPE_UNSIGNED (sizetype)),
3740 TYPE_MODE (sizetype));
3742 preserve_temp_slots (to_rtx);
3745 return want_value ? to_rtx : NULL_RTX;
3748 /* Compute FROM and store the value in the rtx we got. */
3751 result = store_expr (from, to_rtx, want_value);
3752 preserve_temp_slots (result);
3755 return want_value ? result : NULL_RTX;
3758 /* Generate code for computing expression EXP,
3759 and storing the value into TARGET.
3761 If WANT_VALUE & 1 is nonzero, return a copy of the value
3762 not in TARGET, so that we can be sure to use the proper
3763 value in a containing expression even if TARGET has something
3764 else stored in it. If possible, we copy the value through a pseudo
3765 and return that pseudo. Or, if the value is constant, we try to
3766 return the constant. In some cases, we return a pseudo
3767 copied *from* TARGET.
3769 If the mode is BLKmode then we may return TARGET itself.
3770 It turns out that in BLKmode it doesn't cause a problem.
3771 because C has no operators that could combine two different
3772 assignments into the same BLKmode object with different values
3773 with no sequence point. Will other languages need this to
3776 If WANT_VALUE & 1 is 0, we return NULL, to make sure
3777 to catch quickly any cases where the caller uses the value
3778 and fails to set WANT_VALUE.
3780 If WANT_VALUE & 2 is set, this is a store into a call param on the
3781 stack, and block moves may need to be treated specially. */
3784 store_expr (tree exp, rtx target, int want_value)
3787 rtx alt_rtl = NULL_RTX;
3788 int dont_return_target = 0;
3789 int dont_store_target = 0;
3791 if (VOID_TYPE_P (TREE_TYPE (exp)))
3793 /* C++ can generate ?: expressions with a throw expression in one
3794 branch and an rvalue in the other. Here, we resolve attempts to
3795 store the throw expression's nonexistent result. */
3798 expand_expr (exp, const0_rtx, VOIDmode, 0);
3801 if (TREE_CODE (exp) == COMPOUND_EXPR)
3803 /* Perform first part of compound expression, then assign from second
3805 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
3806 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
3807 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3809 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3811 /* For conditional expression, get safe form of the target. Then
3812 test the condition, doing the appropriate assignment on either
3813 side. This avoids the creation of unnecessary temporaries.
3814 For non-BLKmode, it is more efficient not to do this. */
3816 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3818 do_pending_stack_adjust ();
3820 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3821 store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
3822 emit_jump_insn (gen_jump (lab2));
3825 store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
3829 return want_value & 1 ? target : NULL_RTX;
3831 else if ((want_value & 1) != 0
3833 && ! MEM_VOLATILE_P (target)
3834 && GET_MODE (target) != BLKmode)
3835 /* If target is in memory and caller wants value in a register instead,
3836 arrange that. Pass TARGET as target for expand_expr so that,
3837 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3838 We know expand_expr will not use the target in that case.
3839 Don't do this if TARGET is volatile because we are supposed
3840 to write it and then read it. */
3842 temp = expand_expr (exp, target, GET_MODE (target),
3843 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
3844 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3846 /* If TEMP is already in the desired TARGET, only copy it from
3847 memory and don't store it there again. */
3849 || (rtx_equal_p (temp, target)
3850 && ! side_effects_p (temp) && ! side_effects_p (target)))
3851 dont_store_target = 1;
3852 temp = copy_to_reg (temp);
3854 dont_return_target = 1;
3856 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3857 /* If this is a scalar in a register that is stored in a wider mode
3858 than the declared mode, compute the result into its declared mode
3859 and then convert to the wider mode. Our value is the computed
3862 rtx inner_target = 0;
3864 /* If we don't want a value, we can do the conversion inside EXP,
3865 which will often result in some optimizations. Do the conversion
3866 in two steps: first change the signedness, if needed, then
3867 the extend. But don't do this if the type of EXP is a subtype
3868 of something else since then the conversion might involve
3869 more than just converting modes. */
3870 if ((want_value & 1) == 0
3871 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3872 && TREE_TYPE (TREE_TYPE (exp)) == 0
3873 && (!lang_hooks.reduce_bit_field_operations
3874 || (GET_MODE_PRECISION (GET_MODE (target))
3875 == TYPE_PRECISION (TREE_TYPE (exp)))))
3877 if (TYPE_UNSIGNED (TREE_TYPE (exp))
3878 != SUBREG_PROMOTED_UNSIGNED_P (target))
3880 (lang_hooks.types.signed_or_unsigned_type
3881 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
3883 exp = convert (lang_hooks.types.type_for_mode
3884 (GET_MODE (SUBREG_REG (target)),
3885 SUBREG_PROMOTED_UNSIGNED_P (target)),
3888 inner_target = SUBREG_REG (target);
3891 temp = expand_expr (exp, inner_target, VOIDmode,
3892 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
3894 /* If TEMP is a MEM and we want a result value, make the access
3895 now so it gets done only once. Strictly speaking, this is
3896 only necessary if the MEM is volatile, or if the address
3897 overlaps TARGET. But not performing the load twice also
3898 reduces the amount of rtl we generate and then have to CSE. */
3899 if (MEM_P (temp) && (want_value & 1) != 0)
3900 temp = copy_to_reg (temp);
3902 /* If TEMP is a VOIDmode constant, use convert_modes to make
3903 sure that we properly convert it. */
3904 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3906 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3907 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
3908 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3909 GET_MODE (target), temp,
3910 SUBREG_PROMOTED_UNSIGNED_P (target));
3913 convert_move (SUBREG_REG (target), temp,
3914 SUBREG_PROMOTED_UNSIGNED_P (target));
3916 /* If we promoted a constant, change the mode back down to match
3917 target. Otherwise, the caller might get confused by a result whose
3918 mode is larger than expected. */
3920 if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
3922 if (GET_MODE (temp) != VOIDmode)
3924 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
3925 SUBREG_PROMOTED_VAR_P (temp) = 1;
3926 SUBREG_PROMOTED_UNSIGNED_SET (temp,
3927 SUBREG_PROMOTED_UNSIGNED_P (target));
3930 temp = convert_modes (GET_MODE (target),
3931 GET_MODE (SUBREG_REG (target)),
3932 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
3935 return want_value & 1 ? temp : NULL_RTX;
3939 temp = expand_expr_real (exp, target, GET_MODE (target),
3941 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
3943 /* Return TARGET if it's a specified hardware register.
3944 If TARGET is a volatile mem ref, either return TARGET
3945 or return a reg copied *from* TARGET; ANSI requires this.
3947 Otherwise, if TEMP is not TARGET, return TEMP
3948 if it is constant (for efficiency),
3949 or if we really want the correct value. */
3950 if (!(target && REG_P (target)
3951 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3952 && !(MEM_P (target) && MEM_VOLATILE_P (target))
3953 && ! rtx_equal_p (temp, target)
3954 && (CONSTANT_P (temp) || (want_value & 1) != 0))
3955 dont_return_target = 1;
3958 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3959 the same as that of TARGET, adjust the constant. This is needed, for
3960 example, in case it is a CONST_DOUBLE and we want only a word-sized
3962 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3963 && TREE_CODE (exp) != ERROR_MARK
3964 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3965 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3966 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
3968 /* If value was not generated in the target, store it there.
3969 Convert the value to TARGET's type first if necessary and emit the
3970 pending incrementations that have been queued when expanding EXP.
3971 Note that we cannot emit the whole queue blindly because this will
3972 effectively disable the POST_INC optimization later.
3974 If TEMP and TARGET compare equal according to rtx_equal_p, but
3975 one or both of them are volatile memory refs, we have to distinguish
3977 - expand_expr has used TARGET. In this case, we must not generate
3978 another copy. This can be detected by TARGET being equal according
3980 - expand_expr has not used TARGET - that means that the source just
3981 happens to have the same RTX form. Since temp will have been created
3982 by expand_expr, it will compare unequal according to == .
3983 We must generate a copy in this case, to reach the correct number
3984 of volatile memory references. */
3986 if ((! rtx_equal_p (temp, target)
3987 || (temp != target && (side_effects_p (temp)
3988 || side_effects_p (target))))
3989 && TREE_CODE (exp) != ERROR_MARK
3990 && ! dont_store_target
3991 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
3992 but TARGET is not valid memory reference, TEMP will differ
3993 from TARGET although it is really the same location. */
3994 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
3995 /* If there's nothing to copy, don't bother. Don't call expr_size
3996 unless necessary, because some front-ends (C++) expr_size-hook
3997 aborts on objects that are not supposed to be bit-copied or
3999 && expr_size (exp) != const0_rtx)
4001 if (GET_MODE (temp) != GET_MODE (target)
4002 && GET_MODE (temp) != VOIDmode)
4004 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4005 if (dont_return_target)
4007 /* In this case, we will return TEMP,
4008 so make sure it has the proper mode.
4009 But don't forget to store the value into TARGET. */
4010 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4011 emit_move_insn (target, temp);
4014 convert_move (target, temp, unsignedp);
4017 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4019 /* Handle copying a string constant into an array. The string
4020 constant may be shorter than the array. So copy just the string's
4021 actual length, and clear the rest. First get the size of the data
4022 type of the string, which is actually the size of the target. */
4023 rtx size = expr_size (exp);
4025 if (GET_CODE (size) == CONST_INT
4026 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4027 emit_block_move (target, temp, size,
4029 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4032 /* Compute the size of the data to copy from the string. */
4034 = size_binop (MIN_EXPR,
4035 make_tree (sizetype, size),
4036 size_int (TREE_STRING_LENGTH (exp)));
4038 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4040 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4043 /* Copy that much. */
4044 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4045 TYPE_UNSIGNED (sizetype));
4046 emit_block_move (target, temp, copy_size_rtx,
4048 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4050 /* Figure out how much is left in TARGET that we have to clear.
4051 Do all calculations in ptr_mode. */
4052 if (GET_CODE (copy_size_rtx) == CONST_INT)
4054 size = plus_constant (size, -INTVAL (copy_size_rtx));
4055 target = adjust_address (target, BLKmode,
4056 INTVAL (copy_size_rtx));
4060 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4061 copy_size_rtx, NULL_RTX, 0,
4064 #ifdef POINTERS_EXTEND_UNSIGNED
4065 if (GET_MODE (copy_size_rtx) != Pmode)
4066 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4067 TYPE_UNSIGNED (sizetype));
4070 target = offset_address (target, copy_size_rtx,
4071 highest_pow2_factor (copy_size));
4072 label = gen_label_rtx ();
4073 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4074 GET_MODE (size), 0, label);
4077 if (size != const0_rtx)
4078 clear_storage (target, size);
4084 /* Handle calls that return values in multiple non-contiguous locations.
4085 The Irix 6 ABI has examples of this. */
4086 else if (GET_CODE (target) == PARALLEL)
4087 emit_group_load (target, temp, TREE_TYPE (exp),
4088 int_size_in_bytes (TREE_TYPE (exp)));
4089 else if (GET_MODE (temp) == BLKmode)
4090 emit_block_move (target, temp, expr_size (exp),
4092 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4095 temp = force_operand (temp, target);
4097 emit_move_insn (target, temp);
4101 /* If we don't want a value, return NULL_RTX. */
4102 if ((want_value & 1) == 0)
4105 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4106 ??? The latter test doesn't seem to make sense. */
4107 else if (dont_return_target && !MEM_P (temp))
4110 /* Return TARGET itself if it is a hard register. */
4111 else if ((want_value & 1) != 0
4112 && GET_MODE (target) != BLKmode
4113 && ! (REG_P (target)
4114 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4115 return copy_to_reg (target);
4121 /* Examine CTOR. Discover how many scalar fields are set to nonzero
4122 values and place it in *P_NZ_ELTS. Discover how many scalar fields
4123 are set to non-constant values and place it in *P_NC_ELTS. */
4126 categorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts,
4127 HOST_WIDE_INT *p_nc_elts)
4129 HOST_WIDE_INT nz_elts, nc_elts;
4135 for (list = CONSTRUCTOR_ELTS (ctor); list; list = TREE_CHAIN (list))
4137 tree value = TREE_VALUE (list);
4138 tree purpose = TREE_PURPOSE (list);
4142 if (TREE_CODE (purpose) == RANGE_EXPR)
4144 tree lo_index = TREE_OPERAND (purpose, 0);
4145 tree hi_index = TREE_OPERAND (purpose, 1);
4147 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4148 mult = (tree_low_cst (hi_index, 1)
4149 - tree_low_cst (lo_index, 1) + 1);
4152 switch (TREE_CODE (value))
4156 HOST_WIDE_INT nz = 0, nc = 0;
4157 categorize_ctor_elements_1 (value, &nz, &nc);
4158 nz_elts += mult * nz;
4159 nc_elts += mult * nc;
4165 if (!initializer_zerop (value))
4169 if (!initializer_zerop (TREE_REALPART (value)))
4171 if (!initializer_zerop (TREE_IMAGPART (value)))
4177 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4178 if (!initializer_zerop (TREE_VALUE (v)))
4185 if (!initializer_constant_valid_p (value, TREE_TYPE (value)))
4191 *p_nz_elts += nz_elts;
4192 *p_nc_elts += nc_elts;
4196 categorize_ctor_elements (tree ctor, HOST_WIDE_INT *p_nz_elts,
4197 HOST_WIDE_INT *p_nc_elts)
4201 categorize_ctor_elements_1 (ctor, p_nz_elts, p_nc_elts);
4204 /* Count the number of scalars in TYPE. Return -1 on overflow or
4208 count_type_elements (tree type)
4210 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
4211 switch (TREE_CODE (type))
4215 tree telts = array_type_nelts (type);
4216 if (telts && host_integerp (telts, 1))
4218 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
4219 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type));
4222 else if (max / n > m)
4230 HOST_WIDE_INT n = 0, t;
4233 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4234 if (TREE_CODE (f) == FIELD_DECL)
4236 t = count_type_elements (TREE_TYPE (f));
4246 case QUAL_UNION_TYPE:
4248 /* Ho hum. How in the world do we guess here? Clearly it isn't
4249 right to count the fields. Guess based on the number of words. */
4250 HOST_WIDE_INT n = int_size_in_bytes (type);
4253 return n / UNITS_PER_WORD;
4260 return TYPE_VECTOR_SUBPARTS (type);
4269 case REFERENCE_TYPE:
4283 /* Return 1 if EXP contains mostly (3/4) zeros. */
4286 mostly_zeros_p (tree exp)
4288 if (TREE_CODE (exp) == CONSTRUCTOR)
4291 HOST_WIDE_INT nz_elts, nc_elts, elts;
4293 /* If there are no ranges of true bits, it is all zero. */
4294 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4295 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4297 categorize_ctor_elements (exp, &nz_elts, &nc_elts);
4298 elts = count_type_elements (TREE_TYPE (exp));
4300 return nz_elts < elts / 4;
4303 return initializer_zerop (exp);
4306 /* Helper function for store_constructor.
4307 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4308 TYPE is the type of the CONSTRUCTOR, not the element type.
4309 CLEARED is as for store_constructor.
4310 ALIAS_SET is the alias set to use for any stores.
4312 This provides a recursive shortcut back to store_constructor when it isn't
4313 necessary to go through store_field. This is so that we can pass through
4314 the cleared field to let store_constructor know that we may not have to
4315 clear a substructure if the outer structure has already been cleared. */
4318 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4319 HOST_WIDE_INT bitpos, enum machine_mode mode,
4320 tree exp, tree type, int cleared, int alias_set)
4322 if (TREE_CODE (exp) == CONSTRUCTOR
4323 /* We can only call store_constructor recursively if the size and
4324 bit position are on a byte boundary. */
4325 && bitpos % BITS_PER_UNIT == 0
4326 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
4327 /* If we have a nonzero bitpos for a register target, then we just
4328 let store_field do the bitfield handling. This is unlikely to
4329 generate unnecessary clear instructions anyways. */
4330 && (bitpos == 0 || MEM_P (target)))
4334 = adjust_address (target,
4335 GET_MODE (target) == BLKmode
4337 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4338 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4341 /* Update the alias set, if required. */
4342 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
4343 && MEM_ALIAS_SET (target) != 0)
4345 target = copy_rtx (target);
4346 set_mem_alias_set (target, alias_set);
4349 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4352 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4356 /* Store the value of constructor EXP into the rtx TARGET.
4357 TARGET is either a REG or a MEM; we know it cannot conflict, since
4358 safe_from_p has been called.
4359 CLEARED is true if TARGET is known to have been zero'd.
4360 SIZE is the number of bytes of TARGET we are allowed to modify: this
4361 may not be the same as the size of EXP if we are assigning to a field
4362 which has been packed to exclude padding bits. */
4365 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4367 tree type = TREE_TYPE (exp);
4368 #ifdef WORD_REGISTER_OPERATIONS
4369 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4372 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4373 || TREE_CODE (type) == QUAL_UNION_TYPE)
4377 /* If size is zero or the target is already cleared, do nothing. */
4378 if (size == 0 || cleared)
4380 /* We either clear the aggregate or indicate the value is dead. */
4381 else if ((TREE_CODE (type) == UNION_TYPE
4382 || TREE_CODE (type) == QUAL_UNION_TYPE)
4383 && ! CONSTRUCTOR_ELTS (exp))
4384 /* If the constructor is empty, clear the union. */
4386 clear_storage (target, expr_size (exp));
4390 /* If we are building a static constructor into a register,
4391 set the initial value as zero so we can fold the value into
4392 a constant. But if more than one register is involved,
4393 this probably loses. */
4394 else if (REG_P (target) && TREE_STATIC (exp)
4395 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4397 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4401 /* If the constructor has fewer fields than the structure
4402 or if we are initializing the structure to mostly zeros,
4403 clear the whole structure first. Don't do this if TARGET is a
4404 register whose mode size isn't equal to SIZE since clear_storage
4405 can't handle this case. */
4407 && ((list_length (CONSTRUCTOR_ELTS (exp)) != fields_length (type))
4408 || mostly_zeros_p (exp))
4410 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4413 rtx xtarget = target;
4415 if (readonly_fields_p (type))
4417 xtarget = copy_rtx (xtarget);
4418 RTX_UNCHANGING_P (xtarget) = 1;
4421 clear_storage (xtarget, GEN_INT (size));
4426 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4428 /* Store each element of the constructor into
4429 the corresponding field of TARGET. */
4431 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4433 tree field = TREE_PURPOSE (elt);
4434 tree value = TREE_VALUE (elt);
4435 enum machine_mode mode;
4436 HOST_WIDE_INT bitsize;
4437 HOST_WIDE_INT bitpos = 0;
4439 rtx to_rtx = target;
4441 /* Just ignore missing fields.
4442 We cleared the whole structure, above,
4443 if any fields are missing. */
4447 if (cleared && initializer_zerop (value))
4450 if (host_integerp (DECL_SIZE (field), 1))
4451 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4455 mode = DECL_MODE (field);
4456 if (DECL_BIT_FIELD (field))
4459 offset = DECL_FIELD_OFFSET (field);
4460 if (host_integerp (offset, 0)
4461 && host_integerp (bit_position (field), 0))
4463 bitpos = int_bit_position (field);
4467 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4474 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
4475 make_tree (TREE_TYPE (exp),
4478 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4479 if (!MEM_P (to_rtx))
4482 #ifdef POINTERS_EXTEND_UNSIGNED
4483 if (GET_MODE (offset_rtx) != Pmode)
4484 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4486 if (GET_MODE (offset_rtx) != ptr_mode)
4487 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4490 to_rtx = offset_address (to_rtx, offset_rtx,
4491 highest_pow2_factor (offset));
4494 if (TREE_READONLY (field))
4497 to_rtx = copy_rtx (to_rtx);
4499 RTX_UNCHANGING_P (to_rtx) = 1;
4502 #ifdef WORD_REGISTER_OPERATIONS
4503 /* If this initializes a field that is smaller than a word, at the
4504 start of a word, try to widen it to a full word.
4505 This special case allows us to output C++ member function
4506 initializations in a form that the optimizers can understand. */
4508 && bitsize < BITS_PER_WORD
4509 && bitpos % BITS_PER_WORD == 0
4510 && GET_MODE_CLASS (mode) == MODE_INT
4511 && TREE_CODE (value) == INTEGER_CST
4513 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4515 tree type = TREE_TYPE (value);
4517 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4519 type = lang_hooks.types.type_for_size
4520 (BITS_PER_WORD, TYPE_UNSIGNED (type));
4521 value = convert (type, value);
4524 if (BYTES_BIG_ENDIAN)
4526 = fold (build2 (LSHIFT_EXPR, type, value,
4527 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4528 bitsize = BITS_PER_WORD;
4533 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4534 && DECL_NONADDRESSABLE_P (field))
4536 to_rtx = copy_rtx (to_rtx);
4537 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4540 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4541 value, type, cleared,
4542 get_alias_set (TREE_TYPE (field)));
4546 else if (TREE_CODE (type) == ARRAY_TYPE)
4552 tree elttype = TREE_TYPE (type);
4554 HOST_WIDE_INT minelt = 0;
4555 HOST_WIDE_INT maxelt = 0;
4557 domain = TYPE_DOMAIN (type);
4558 const_bounds_p = (TYPE_MIN_VALUE (domain)
4559 && TYPE_MAX_VALUE (domain)
4560 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4561 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4563 /* If we have constant bounds for the range of the type, get them. */
4566 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4567 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4570 /* If the constructor has fewer elements than the array,
4571 clear the whole array first. Similarly if this is
4572 static constructor of a non-BLKmode object. */
4575 else if (REG_P (target) && TREE_STATIC (exp))
4579 HOST_WIDE_INT count = 0, zero_count = 0;
4580 need_to_clear = ! const_bounds_p;
4582 /* This loop is a more accurate version of the loop in
4583 mostly_zeros_p (it handles RANGE_EXPR in an index).
4584 It is also needed to check for missing elements. */
4585 for (elt = CONSTRUCTOR_ELTS (exp);
4586 elt != NULL_TREE && ! need_to_clear;
4587 elt = TREE_CHAIN (elt))
4589 tree index = TREE_PURPOSE (elt);
4590 HOST_WIDE_INT this_node_count;
4592 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4594 tree lo_index = TREE_OPERAND (index, 0);
4595 tree hi_index = TREE_OPERAND (index, 1);
4597 if (! host_integerp (lo_index, 1)
4598 || ! host_integerp (hi_index, 1))
4604 this_node_count = (tree_low_cst (hi_index, 1)
4605 - tree_low_cst (lo_index, 1) + 1);
4608 this_node_count = 1;
4610 count += this_node_count;
4611 if (mostly_zeros_p (TREE_VALUE (elt)))
4612 zero_count += this_node_count;
4615 /* Clear the entire array first if there are any missing elements,
4616 or if the incidence of zero elements is >= 75%. */
4618 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4622 if (need_to_clear && size > 0)
4625 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4627 clear_storage (target, GEN_INT (size));
4631 if (!cleared && REG_P (target))
4632 /* Inform later passes that the old value is dead. */
4633 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4635 /* Store each element of the constructor into
4636 the corresponding element of TARGET, determined
4637 by counting the elements. */
4638 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4640 elt = TREE_CHAIN (elt), i++)
4642 enum machine_mode mode;
4643 HOST_WIDE_INT bitsize;
4644 HOST_WIDE_INT bitpos;
4646 tree value = TREE_VALUE (elt);
4647 tree index = TREE_PURPOSE (elt);
4648 rtx xtarget = target;
4650 if (cleared && initializer_zerop (value))
4653 unsignedp = TYPE_UNSIGNED (elttype);
4654 mode = TYPE_MODE (elttype);
4655 if (mode == BLKmode)
4656 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4657 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4660 bitsize = GET_MODE_BITSIZE (mode);
4662 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4664 tree lo_index = TREE_OPERAND (index, 0);
4665 tree hi_index = TREE_OPERAND (index, 1);
4666 rtx index_r, pos_rtx;
4667 HOST_WIDE_INT lo, hi, count;
4670 /* If the range is constant and "small", unroll the loop. */
4672 && host_integerp (lo_index, 0)
4673 && host_integerp (hi_index, 0)
4674 && (lo = tree_low_cst (lo_index, 0),
4675 hi = tree_low_cst (hi_index, 0),
4676 count = hi - lo + 1,
4679 || (host_integerp (TYPE_SIZE (elttype), 1)
4680 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4683 lo -= minelt; hi -= minelt;
4684 for (; lo <= hi; lo++)
4686 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4689 && !MEM_KEEP_ALIAS_SET_P (target)
4690 && TREE_CODE (type) == ARRAY_TYPE
4691 && TYPE_NONALIASED_COMPONENT (type))
4693 target = copy_rtx (target);
4694 MEM_KEEP_ALIAS_SET_P (target) = 1;
4697 store_constructor_field
4698 (target, bitsize, bitpos, mode, value, type, cleared,
4699 get_alias_set (elttype));
4704 rtx loop_start = gen_label_rtx ();
4705 rtx loop_end = gen_label_rtx ();
4708 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4709 unsignedp = TYPE_UNSIGNED (domain);
4711 index = build_decl (VAR_DECL, NULL_TREE, domain);
4714 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4716 SET_DECL_RTL (index, index_r);
4717 store_expr (lo_index, index_r, 0);
4719 /* Build the head of the loop. */
4720 do_pending_stack_adjust ();
4721 emit_label (loop_start);
4723 /* Assign value to element index. */
4725 = convert (ssizetype,
4726 fold (build2 (MINUS_EXPR, TREE_TYPE (index),
4727 index, TYPE_MIN_VALUE (domain))));
4728 position = size_binop (MULT_EXPR, position,
4730 TYPE_SIZE_UNIT (elttype)));
4732 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4733 xtarget = offset_address (target, pos_rtx,
4734 highest_pow2_factor (position));
4735 xtarget = adjust_address (xtarget, mode, 0);
4736 if (TREE_CODE (value) == CONSTRUCTOR)
4737 store_constructor (value, xtarget, cleared,
4738 bitsize / BITS_PER_UNIT);
4740 store_expr (value, xtarget, 0);
4742 /* Generate a conditional jump to exit the loop. */
4743 exit_cond = build2 (LT_EXPR, integer_type_node,
4745 jumpif (exit_cond, loop_end);
4747 /* Update the loop counter, and jump to the head of
4749 expand_assignment (index,
4750 build2 (PLUS_EXPR, TREE_TYPE (index),
4751 index, integer_one_node), 0);
4753 emit_jump (loop_start);
4755 /* Build the end of the loop. */
4756 emit_label (loop_end);
4759 else if ((index != 0 && ! host_integerp (index, 0))
4760 || ! host_integerp (TYPE_SIZE (elttype), 1))
4765 index = ssize_int (1);
4768 index = fold_convert (ssizetype,
4769 fold (build2 (MINUS_EXPR,
4772 TYPE_MIN_VALUE (domain))));
4774 position = size_binop (MULT_EXPR, index,
4776 TYPE_SIZE_UNIT (elttype)));
4777 xtarget = offset_address (target,
4778 expand_expr (position, 0, VOIDmode, 0),
4779 highest_pow2_factor (position));
4780 xtarget = adjust_address (xtarget, mode, 0);
4781 store_expr (value, xtarget, 0);
4786 bitpos = ((tree_low_cst (index, 0) - minelt)
4787 * tree_low_cst (TYPE_SIZE (elttype), 1));
4789 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4791 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
4792 && TREE_CODE (type) == ARRAY_TYPE
4793 && TYPE_NONALIASED_COMPONENT (type))
4795 target = copy_rtx (target);
4796 MEM_KEEP_ALIAS_SET_P (target) = 1;
4798 store_constructor_field (target, bitsize, bitpos, mode, value,
4799 type, cleared, get_alias_set (elttype));
4804 else if (TREE_CODE (type) == VECTOR_TYPE)
4810 tree elttype = TREE_TYPE (type);
4811 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
4812 enum machine_mode eltmode = TYPE_MODE (elttype);
4813 HOST_WIDE_INT bitsize;
4814 HOST_WIDE_INT bitpos;
4818 if (eltmode == BLKmode)
4821 n_elts = TYPE_VECTOR_SUBPARTS (type);
4822 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
4824 enum machine_mode mode = GET_MODE (target);
4826 icode = (int) vec_init_optab->handlers[mode].insn_code;
4827 if (icode != CODE_FOR_nothing)
4831 vector = alloca (n_elts);
4832 for (i = 0; i < n_elts; i++)
4833 vector [i] = CONST0_RTX (GET_MODE_INNER (mode));
4837 /* If the constructor has fewer elements than the vector,
4838 clear the whole array first. Similarly if this is
4839 static constructor of a non-BLKmode object. */
4842 else if (REG_P (target) && TREE_STATIC (exp))
4846 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
4848 for (elt = CONSTRUCTOR_ELTS (exp);
4850 elt = TREE_CHAIN (elt))
4854 int_const_binop (TRUNC_DIV_EXPR,
4855 TYPE_SIZE (TREE_TYPE (TREE_VALUE (elt))),
4856 TYPE_SIZE (elttype), 0), 1);
4858 count += n_elts_here;
4859 if (mostly_zeros_p (TREE_VALUE (elt)))
4860 zero_count += n_elts_here;
4863 /* Clear the entire vector first if there are any missing elements,
4864 or if the incidence of zero elements is >= 75%. */
4865 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
4868 if (need_to_clear && size > 0 && !vector)
4871 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4873 clear_storage (target, GEN_INT (size));
4877 if (!cleared && REG_P (target))
4878 /* Inform later passes that the old value is dead. */
4879 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4881 /* Store each element of the constructor into the corresponding
4882 element of TARGET, determined by counting the elements. */
4883 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4885 elt = TREE_CHAIN (elt), i += bitsize / elt_size)
4887 tree value = TREE_VALUE (elt);
4888 tree index = TREE_PURPOSE (elt);
4889 HOST_WIDE_INT eltpos;
4891 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
4892 if (cleared && initializer_zerop (value))
4896 eltpos = tree_low_cst (index, 1);
4902 /* Vector CONSTRUCTORs should only be built from smaller
4903 vectors in the case of BLKmode vectors. */
4904 if (TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE)
4906 vector[eltpos] = expand_expr (value, NULL_RTX, VOIDmode, 0);
4910 enum machine_mode value_mode =
4911 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
4912 ? TYPE_MODE (TREE_TYPE (value))
4914 bitpos = eltpos * elt_size;
4915 store_constructor_field (target, bitsize, bitpos, value_mode, value,
4916 type, cleared, get_alias_set (elttype));
4921 emit_insn (GEN_FCN (icode) (target,
4922 gen_rtx_PARALLEL (GET_MODE (target),
4923 gen_rtvec_v (n_elts, vector))));
4926 /* Set constructor assignments. */
4927 else if (TREE_CODE (type) == SET_TYPE)
4929 tree elt = CONSTRUCTOR_ELTS (exp);
4930 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4931 tree domain = TYPE_DOMAIN (type);
4932 tree domain_min, domain_max, bitlength;
4934 /* The default implementation strategy is to extract the constant
4935 parts of the constructor, use that to initialize the target,
4936 and then "or" in whatever non-constant ranges we need in addition.
4938 If a large set is all zero or all ones, it is
4939 probably better to set it using memset.
4940 Also, if a large set has just a single range, it may also be
4941 better to first clear all the first clear the set (using
4942 memset), and set the bits we want. */
4944 /* Check for all zeros. */
4945 if (elt == NULL_TREE && size > 0)
4948 clear_storage (target, GEN_INT (size));
4952 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4953 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4954 bitlength = size_binop (PLUS_EXPR,
4955 size_diffop (domain_max, domain_min),
4958 nbits = tree_low_cst (bitlength, 1);
4960 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4961 are "complicated" (more than one range), initialize (the
4962 constant parts) by copying from a constant. */
4963 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4964 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4966 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4967 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4968 char *bit_buffer = alloca (nbits);
4969 HOST_WIDE_INT word = 0;
4970 unsigned int bit_pos = 0;
4971 unsigned int ibit = 0;
4972 unsigned int offset = 0; /* In bytes from beginning of set. */
4974 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4977 if (bit_buffer[ibit])
4979 if (BYTES_BIG_ENDIAN)
4980 word |= (1 << (set_word_size - 1 - bit_pos));
4982 word |= 1 << bit_pos;
4986 if (bit_pos >= set_word_size || ibit == nbits)
4988 if (word != 0 || ! cleared)
4990 rtx datum = gen_int_mode (word, mode);
4993 /* The assumption here is that it is safe to use
4994 XEXP if the set is multi-word, but not if
4995 it's single-word. */
4997 to_rtx = adjust_address (target, mode, offset);
4998 else if (offset == 0)
5002 emit_move_insn (to_rtx, datum);
5009 offset += set_word_size / BITS_PER_UNIT;
5014 /* Don't bother clearing storage if the set is all ones. */
5015 if (TREE_CHAIN (elt) != NULL_TREE
5016 || (TREE_PURPOSE (elt) == NULL_TREE
5018 : ( ! host_integerp (TREE_VALUE (elt), 0)
5019 || ! host_integerp (TREE_PURPOSE (elt), 0)
5020 || (tree_low_cst (TREE_VALUE (elt), 0)
5021 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5022 != (HOST_WIDE_INT) nbits))))
5023 clear_storage (target, expr_size (exp));
5025 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5027 /* Start of range of element or NULL. */
5028 tree startbit = TREE_PURPOSE (elt);
5029 /* End of range of element, or element value. */
5030 tree endbit = TREE_VALUE (elt);
5031 HOST_WIDE_INT startb, endb;
5032 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5034 bitlength_rtx = expand_expr (bitlength,
5035 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5037 /* Handle non-range tuple element like [ expr ]. */
5038 if (startbit == NULL_TREE)
5040 startbit = save_expr (endbit);
5044 startbit = convert (sizetype, startbit);
5045 endbit = convert (sizetype, endbit);
5046 if (! integer_zerop (domain_min))
5048 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5049 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5051 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5052 EXPAND_CONST_ADDRESS);
5053 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5054 EXPAND_CONST_ADDRESS);
5060 ((build_qualified_type (lang_hooks.types.type_for_mode
5061 (GET_MODE (target), 0),
5064 emit_move_insn (targetx, target);
5067 else if (MEM_P (target))
5072 /* Optimization: If startbit and endbit are constants divisible
5073 by BITS_PER_UNIT, call memset instead. */
5074 if (TREE_CODE (startbit) == INTEGER_CST
5075 && TREE_CODE (endbit) == INTEGER_CST
5076 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5077 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5079 emit_library_call (memset_libfunc, LCT_NORMAL,
5081 plus_constant (XEXP (targetx, 0),
5082 startb / BITS_PER_UNIT),
5084 constm1_rtx, TYPE_MODE (integer_type_node),
5085 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5086 TYPE_MODE (sizetype));
5089 emit_library_call (setbits_libfunc, LCT_NORMAL,
5090 VOIDmode, 4, XEXP (targetx, 0),
5091 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5092 startbit_rtx, TYPE_MODE (sizetype),
5093 endbit_rtx, TYPE_MODE (sizetype));
5096 emit_move_insn (target, targetx);
5104 /* Store the value of EXP (an expression tree)
5105 into a subfield of TARGET which has mode MODE and occupies
5106 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5107 If MODE is VOIDmode, it means that we are storing into a bit-field.
5109 If VALUE_MODE is VOIDmode, return nothing in particular.
5110 UNSIGNEDP is not used in this case.
5112 Otherwise, return an rtx for the value stored. This rtx
5113 has mode VALUE_MODE if that is convenient to do.
5114 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5116 TYPE is the type of the underlying object,
5118 ALIAS_SET is the alias set for the destination. This value will
5119 (in general) be different from that for TARGET, since TARGET is a
5120 reference to the containing structure. */
5123 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5124 enum machine_mode mode, tree exp, enum machine_mode value_mode,
5125 int unsignedp, tree type, int alias_set)
5127 HOST_WIDE_INT width_mask = 0;
5129 if (TREE_CODE (exp) == ERROR_MARK)
5132 /* If we have nothing to store, do nothing unless the expression has
5135 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5136 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5137 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5139 /* If we are storing into an unaligned field of an aligned union that is
5140 in a register, we may have the mode of TARGET being an integer mode but
5141 MODE == BLKmode. In that case, get an aligned object whose size and
5142 alignment are the same as TARGET and store TARGET into it (we can avoid
5143 the store if the field being stored is the entire width of TARGET). Then
5144 call ourselves recursively to store the field into a BLKmode version of
5145 that object. Finally, load from the object into TARGET. This is not
5146 very efficient in general, but should only be slightly more expensive
5147 than the otherwise-required unaligned accesses. Perhaps this can be
5148 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5149 twice, once with emit_move_insn and once via store_field. */
5152 && (REG_P (target) || GET_CODE (target) == SUBREG))
5154 rtx object = assign_temp (type, 0, 1, 1);
5155 rtx blk_object = adjust_address (object, BLKmode, 0);
5157 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5158 emit_move_insn (object, target);
5160 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5163 emit_move_insn (target, object);
5165 /* We want to return the BLKmode version of the data. */
5169 if (GET_CODE (target) == CONCAT)
5171 /* We're storing into a struct containing a single __complex. */
5175 return store_expr (exp, target, value_mode != VOIDmode);
5178 /* If the structure is in a register or if the component
5179 is a bit field, we cannot use addressing to access it.
5180 Use bit-field techniques or SUBREG to store in it. */
5182 if (mode == VOIDmode
5183 || (mode != BLKmode && ! direct_store[(int) mode]
5184 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5185 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5187 || GET_CODE (target) == SUBREG
5188 /* If the field isn't aligned enough to store as an ordinary memref,
5189 store it as a bit field. */
5191 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5192 || bitpos % GET_MODE_ALIGNMENT (mode))
5193 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5194 || (bitpos % BITS_PER_UNIT != 0)))
5195 /* If the RHS and field are a constant size and the size of the
5196 RHS isn't the same size as the bitfield, we must use bitfield
5199 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5200 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5202 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5204 /* If BITSIZE is narrower than the size of the type of EXP
5205 we will be narrowing TEMP. Normally, what's wanted are the
5206 low-order bits. However, if EXP's type is a record and this is
5207 big-endian machine, we want the upper BITSIZE bits. */
5208 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5209 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5210 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5211 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5212 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5216 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5218 if (mode != VOIDmode && mode != BLKmode
5219 && mode != TYPE_MODE (TREE_TYPE (exp)))
5220 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5222 /* If the modes of TARGET and TEMP are both BLKmode, both
5223 must be in memory and BITPOS must be aligned on a byte
5224 boundary. If so, we simply do a block copy. */
5225 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5227 if (!MEM_P (target) || !MEM_P (temp)
5228 || bitpos % BITS_PER_UNIT != 0)
5231 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5232 emit_block_move (target, temp,
5233 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5237 return value_mode == VOIDmode ? const0_rtx : target;
5240 /* Store the value in the bitfield. */
5241 store_bit_field (target, bitsize, bitpos, mode, temp);
5243 if (value_mode != VOIDmode)
5245 /* The caller wants an rtx for the value.
5246 If possible, avoid refetching from the bitfield itself. */
5248 && ! (MEM_P (target) && MEM_VOLATILE_P (target)))
5251 enum machine_mode tmode;
5253 tmode = GET_MODE (temp);
5254 if (tmode == VOIDmode)
5258 return expand_and (tmode, temp,
5259 gen_int_mode (width_mask, tmode),
5262 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5263 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5264 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5267 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5268 NULL_RTX, value_mode, VOIDmode);
5274 rtx addr = XEXP (target, 0);
5275 rtx to_rtx = target;
5277 /* If a value is wanted, it must be the lhs;
5278 so make the address stable for multiple use. */
5280 if (value_mode != VOIDmode && !REG_P (addr)
5281 && ! CONSTANT_ADDRESS_P (addr)
5282 /* A frame-pointer reference is already stable. */
5283 && ! (GET_CODE (addr) == PLUS
5284 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5285 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5286 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5287 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5289 /* Now build a reference to just the desired component. */
5291 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5293 if (to_rtx == target)
5294 to_rtx = copy_rtx (to_rtx);
5296 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5297 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5298 set_mem_alias_set (to_rtx, alias_set);
5300 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5304 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5305 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5306 codes and find the ultimate containing object, which we return.
5308 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5309 bit position, and *PUNSIGNEDP to the signedness of the field.
5310 If the position of the field is variable, we store a tree
5311 giving the variable offset (in units) in *POFFSET.
5312 This offset is in addition to the bit position.
5313 If the position is not variable, we store 0 in *POFFSET.
5315 If any of the extraction expressions is volatile,
5316 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5318 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5319 is a mode that can be used to access the field. In that case, *PBITSIZE
5322 If the field describes a variable-sized object, *PMODE is set to
5323 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5324 this case, but the address of the object can be found. */
5327 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5328 HOST_WIDE_INT *pbitpos, tree *poffset,
5329 enum machine_mode *pmode, int *punsignedp,
5333 enum machine_mode mode = VOIDmode;
5334 tree offset = size_zero_node;
5335 tree bit_offset = bitsize_zero_node;
5338 /* First get the mode, signedness, and size. We do this from just the
5339 outermost expression. */
5340 if (TREE_CODE (exp) == COMPONENT_REF)
5342 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5343 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5344 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5346 *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
5348 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5350 size_tree = TREE_OPERAND (exp, 1);
5351 *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
5355 mode = TYPE_MODE (TREE_TYPE (exp));
5356 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5358 if (mode == BLKmode)
5359 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5361 *pbitsize = GET_MODE_BITSIZE (mode);
5366 if (! host_integerp (size_tree, 1))
5367 mode = BLKmode, *pbitsize = -1;
5369 *pbitsize = tree_low_cst (size_tree, 1);
5372 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5373 and find the ultimate containing object. */
5376 if (TREE_CODE (exp) == BIT_FIELD_REF)
5377 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5378 else if (TREE_CODE (exp) == COMPONENT_REF)
5380 tree field = TREE_OPERAND (exp, 1);
5381 tree this_offset = component_ref_field_offset (exp);
5383 /* If this field hasn't been filled in yet, don't go
5384 past it. This should only happen when folding expressions
5385 made during type construction. */
5386 if (this_offset == 0)
5389 offset = size_binop (PLUS_EXPR, offset, this_offset);
5390 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5391 DECL_FIELD_BIT_OFFSET (field));
5393 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5396 else if (TREE_CODE (exp) == ARRAY_REF
5397 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5399 tree index = TREE_OPERAND (exp, 1);
5400 tree low_bound = array_ref_low_bound (exp);
5401 tree unit_size = array_ref_element_size (exp);
5403 /* We assume all arrays have sizes that are a multiple of a byte.
5404 First subtract the lower bound, if any, in the type of the
5405 index, then convert to sizetype and multiply by the size of the
5407 if (! integer_zerop (low_bound))
5408 index = fold (build2 (MINUS_EXPR, TREE_TYPE (index),
5411 offset = size_binop (PLUS_EXPR, offset,
5412 size_binop (MULT_EXPR,
5413 convert (sizetype, index),
5417 /* We can go inside most conversions: all NON_VALUE_EXPRs, all normal
5418 conversions that don't change the mode, and all view conversions
5419 except those that need to "step up" the alignment. */
5420 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5421 && ! (TREE_CODE (exp) == VIEW_CONVERT_EXPR
5422 && ! ((TYPE_ALIGN (TREE_TYPE (exp))
5423 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5425 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5426 < BIGGEST_ALIGNMENT)
5427 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5428 || TYPE_ALIGN_OK (TREE_TYPE
5429 (TREE_OPERAND (exp, 0))))))
5430 && ! ((TREE_CODE (exp) == NOP_EXPR
5431 || TREE_CODE (exp) == CONVERT_EXPR)
5432 && (TYPE_MODE (TREE_TYPE (exp))
5433 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5436 /* If any reference in the chain is volatile, the effect is volatile. */
5437 if (TREE_THIS_VOLATILE (exp))
5440 exp = TREE_OPERAND (exp, 0);
5443 /* If OFFSET is constant, see if we can return the whole thing as a
5444 constant bit position. Otherwise, split it up. */
5445 if (host_integerp (offset, 0)
5446 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5448 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5449 && host_integerp (tem, 0))
5450 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5452 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5458 /* Return a tree of sizetype representing the size, in bytes, of the element
5459 of EXP, an ARRAY_REF. */
5462 array_ref_element_size (tree exp)
5464 tree aligned_size = TREE_OPERAND (exp, 3);
5465 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5467 /* If a size was specified in the ARRAY_REF, it's the size measured
5468 in alignment units of the element type. So multiply by that value. */
5470 return size_binop (MULT_EXPR, aligned_size,
5471 size_int (TYPE_ALIGN (elmt_type) / BITS_PER_UNIT));
5473 /* Otherwise, take the size from that of the element type. Substitute
5474 any PLACEHOLDER_EXPR that we have. */
5476 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
5479 /* Return a tree representing the lower bound of the array mentioned in
5480 EXP, an ARRAY_REF. */
5483 array_ref_low_bound (tree exp)
5485 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5487 /* If a lower bound is specified in EXP, use it. */
5488 if (TREE_OPERAND (exp, 2))
5489 return TREE_OPERAND (exp, 2);
5491 /* Otherwise, if there is a domain type and it has a lower bound, use it,
5492 substituting for a PLACEHOLDER_EXPR as needed. */
5493 if (domain_type && TYPE_MIN_VALUE (domain_type))
5494 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
5496 /* Otherwise, return a zero of the appropriate type. */
5497 return fold_convert (TREE_TYPE (TREE_OPERAND (exp, 1)), integer_zero_node);
5500 /* Return a tree representing the upper bound of the array mentioned in
5501 EXP, an ARRAY_REF. */
5504 array_ref_up_bound (tree exp)
5506 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5508 /* If there is a domain type and it has an upper bound, use it, substituting
5509 for a PLACEHOLDER_EXPR as needed. */
5510 if (domain_type && TYPE_MAX_VALUE (domain_type))
5511 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
5513 /* Otherwise fail. */
5517 /* Return a tree representing the offset, in bytes, of the field referenced
5518 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
5521 component_ref_field_offset (tree exp)
5523 tree aligned_offset = TREE_OPERAND (exp, 2);
5524 tree field = TREE_OPERAND (exp, 1);
5526 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
5527 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
5530 return size_binop (MULT_EXPR, aligned_offset,
5531 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
5533 /* Otherwise, take the offset from that of the field. Substitute
5534 any PLACEHOLDER_EXPR that we have. */
5536 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
5539 /* Return 1 if T is an expression that get_inner_reference handles. */
5542 handled_component_p (tree t)
5544 switch (TREE_CODE (t))
5549 case ARRAY_RANGE_REF:
5550 case NON_LVALUE_EXPR:
5551 case VIEW_CONVERT_EXPR:
5554 /* ??? Sure they are handled, but get_inner_reference may return
5555 a different PBITSIZE, depending upon whether the expression is
5556 wrapped up in a NOP_EXPR or not, e.g. for bitfields. */
5559 return (TYPE_MODE (TREE_TYPE (t))
5560 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5567 /* Given an rtx VALUE that may contain additions and multiplications, return
5568 an equivalent value that just refers to a register, memory, or constant.
5569 This is done by generating instructions to perform the arithmetic and
5570 returning a pseudo-register containing the value.
5572 The returned value may be a REG, SUBREG, MEM or constant. */
5575 force_operand (rtx value, rtx target)
5578 /* Use subtarget as the target for operand 0 of a binary operation. */
5579 rtx subtarget = get_subtarget (target);
5580 enum rtx_code code = GET_CODE (value);
5582 /* Check for subreg applied to an expression produced by loop optimizer. */
5584 && !REG_P (SUBREG_REG (value))
5585 && !MEM_P (SUBREG_REG (value)))
5587 value = simplify_gen_subreg (GET_MODE (value),
5588 force_reg (GET_MODE (SUBREG_REG (value)),
5589 force_operand (SUBREG_REG (value),
5591 GET_MODE (SUBREG_REG (value)),
5592 SUBREG_BYTE (value));
5593 code = GET_CODE (value);
5596 /* Check for a PIC address load. */
5597 if ((code == PLUS || code == MINUS)
5598 && XEXP (value, 0) == pic_offset_table_rtx
5599 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5600 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5601 || GET_CODE (XEXP (value, 1)) == CONST))
5604 subtarget = gen_reg_rtx (GET_MODE (value));
5605 emit_move_insn (subtarget, value);
5609 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5612 target = gen_reg_rtx (GET_MODE (value));
5613 convert_move (target, force_operand (XEXP (value, 0), NULL),
5614 code == ZERO_EXTEND);
5618 if (ARITHMETIC_P (value))
5620 op2 = XEXP (value, 1);
5621 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
5623 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5626 op2 = negate_rtx (GET_MODE (value), op2);
5629 /* Check for an addition with OP2 a constant integer and our first
5630 operand a PLUS of a virtual register and something else. In that
5631 case, we want to emit the sum of the virtual register and the
5632 constant first and then add the other value. This allows virtual
5633 register instantiation to simply modify the constant rather than
5634 creating another one around this addition. */
5635 if (code == PLUS && GET_CODE (op2) == CONST_INT
5636 && GET_CODE (XEXP (value, 0)) == PLUS
5637 && REG_P (XEXP (XEXP (value, 0), 0))
5638 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5639 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5641 rtx temp = expand_simple_binop (GET_MODE (value), code,
5642 XEXP (XEXP (value, 0), 0), op2,
5643 subtarget, 0, OPTAB_LIB_WIDEN);
5644 return expand_simple_binop (GET_MODE (value), code, temp,
5645 force_operand (XEXP (XEXP (value,
5647 target, 0, OPTAB_LIB_WIDEN);
5650 op1 = force_operand (XEXP (value, 0), subtarget);
5651 op2 = force_operand (op2, NULL_RTX);
5655 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5657 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5658 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5659 target, 1, OPTAB_LIB_WIDEN);
5661 return expand_divmod (0,
5662 FLOAT_MODE_P (GET_MODE (value))
5663 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5664 GET_MODE (value), op1, op2, target, 0);
5667 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5671 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5675 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5679 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5680 target, 0, OPTAB_LIB_WIDEN);
5683 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5684 target, 1, OPTAB_LIB_WIDEN);
5687 if (UNARY_P (value))
5689 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5690 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5693 #ifdef INSN_SCHEDULING
5694 /* On machines that have insn scheduling, we want all memory reference to be
5695 explicit, so we need to deal with such paradoxical SUBREGs. */
5696 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
5697 && (GET_MODE_SIZE (GET_MODE (value))
5698 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5700 = simplify_gen_subreg (GET_MODE (value),
5701 force_reg (GET_MODE (SUBREG_REG (value)),
5702 force_operand (SUBREG_REG (value),
5704 GET_MODE (SUBREG_REG (value)),
5705 SUBREG_BYTE (value));
5711 /* Subroutine of expand_expr: return nonzero iff there is no way that
5712 EXP can reference X, which is being modified. TOP_P is nonzero if this
5713 call is going to be used to determine whether we need a temporary
5714 for EXP, as opposed to a recursive call to this function.
5716 It is always safe for this routine to return zero since it merely
5717 searches for optimization opportunities. */
5720 safe_from_p (rtx x, tree exp, int top_p)
5726 /* If EXP has varying size, we MUST use a target since we currently
5727 have no way of allocating temporaries of variable size
5728 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5729 So we assume here that something at a higher level has prevented a
5730 clash. This is somewhat bogus, but the best we can do. Only
5731 do this when X is BLKmode and when we are at the top level. */
5732 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5733 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5734 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5735 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5736 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5738 && GET_MODE (x) == BLKmode)
5739 /* If X is in the outgoing argument area, it is always safe. */
5741 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5742 || (GET_CODE (XEXP (x, 0)) == PLUS
5743 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5746 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5747 find the underlying pseudo. */
5748 if (GET_CODE (x) == SUBREG)
5751 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
5755 /* Now look at our tree code and possibly recurse. */
5756 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5759 exp_rtl = DECL_RTL_IF_SET (exp);
5766 if (TREE_CODE (exp) == TREE_LIST)
5770 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
5772 exp = TREE_CHAIN (exp);
5775 if (TREE_CODE (exp) != TREE_LIST)
5776 return safe_from_p (x, exp, 0);
5779 else if (TREE_CODE (exp) == ERROR_MARK)
5780 return 1; /* An already-visited SAVE_EXPR? */
5785 /* The only case we look at here is the DECL_INITIAL inside a
5787 return (TREE_CODE (exp) != DECL_EXPR
5788 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
5789 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
5790 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
5794 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
5799 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5803 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5804 the expression. If it is set, we conflict iff we are that rtx or
5805 both are in memory. Otherwise, we check all operands of the
5806 expression recursively. */
5808 switch (TREE_CODE (exp))
5811 /* If the operand is static or we are static, we can't conflict.
5812 Likewise if we don't conflict with the operand at all. */
5813 if (staticp (TREE_OPERAND (exp, 0))
5814 || TREE_STATIC (exp)
5815 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5818 /* Otherwise, the only way this can conflict is if we are taking
5819 the address of a DECL a that address if part of X, which is
5821 exp = TREE_OPERAND (exp, 0);
5824 if (!DECL_RTL_SET_P (exp)
5825 || !MEM_P (DECL_RTL (exp)))
5828 exp_rtl = XEXP (DECL_RTL (exp), 0);
5834 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5835 get_alias_set (exp)))
5840 /* Assume that the call will clobber all hard registers and
5842 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
5847 case WITH_CLEANUP_EXPR:
5848 case CLEANUP_POINT_EXPR:
5849 /* Lowered by gimplify.c. */
5853 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5859 /* If we have an rtx, we do not need to scan our operands. */
5863 nops = first_rtl_op (TREE_CODE (exp));
5864 for (i = 0; i < nops; i++)
5865 if (TREE_OPERAND (exp, i) != 0
5866 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5869 /* If this is a language-specific tree code, it may require
5870 special handling. */
5871 if ((unsigned int) TREE_CODE (exp)
5872 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5873 && !lang_hooks.safe_from_p (x, exp))
5877 /* If we have an rtl, find any enclosed object. Then see if we conflict
5881 if (GET_CODE (exp_rtl) == SUBREG)
5883 exp_rtl = SUBREG_REG (exp_rtl);
5885 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5889 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5890 are memory and they conflict. */
5891 return ! (rtx_equal_p (x, exp_rtl)
5892 || (MEM_P (x) && MEM_P (exp_rtl)
5893 && true_dependence (exp_rtl, VOIDmode, x,
5894 rtx_addr_varies_p)));
5897 /* If we reach here, it is safe. */
5901 /* Subroutine of expand_expr: return rtx if EXP is a
5902 variable or parameter; else return 0. */
5908 switch (TREE_CODE (exp))
5912 return DECL_RTL (exp);
5918 /* Return the highest power of two that EXP is known to be a multiple of.
5919 This is used in updating alignment of MEMs in array references. */
5921 static unsigned HOST_WIDE_INT
5922 highest_pow2_factor (tree exp)
5924 unsigned HOST_WIDE_INT c0, c1;
5926 switch (TREE_CODE (exp))
5929 /* We can find the lowest bit that's a one. If the low
5930 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
5931 We need to handle this case since we can find it in a COND_EXPR,
5932 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
5933 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
5935 if (TREE_CONSTANT_OVERFLOW (exp))
5936 return BIGGEST_ALIGNMENT;
5939 /* Note: tree_low_cst is intentionally not used here,
5940 we don't care about the upper bits. */
5941 c0 = TREE_INT_CST_LOW (exp);
5943 return c0 ? c0 : BIGGEST_ALIGNMENT;
5947 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
5948 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5949 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5950 return MIN (c0, c1);
5953 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5954 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5957 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
5959 if (integer_pow2p (TREE_OPERAND (exp, 1))
5960 && host_integerp (TREE_OPERAND (exp, 1), 1))
5962 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5963 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
5964 return MAX (1, c0 / c1);
5968 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
5970 return highest_pow2_factor (TREE_OPERAND (exp, 0));
5973 return highest_pow2_factor (TREE_OPERAND (exp, 1));
5976 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5977 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
5978 return MIN (c0, c1);
5987 /* Similar, except that the alignment requirements of TARGET are
5988 taken into account. Assume it is at least as aligned as its
5989 type, unless it is a COMPONENT_REF in which case the layout of
5990 the structure gives the alignment. */
5992 static unsigned HOST_WIDE_INT
5993 highest_pow2_factor_for_target (tree target, tree exp)
5995 unsigned HOST_WIDE_INT target_align, factor;
5997 factor = highest_pow2_factor (exp);
5998 if (TREE_CODE (target) == COMPONENT_REF)
5999 target_align = DECL_ALIGN (TREE_OPERAND (target, 1)) / BITS_PER_UNIT;
6001 target_align = TYPE_ALIGN (TREE_TYPE (target)) / BITS_PER_UNIT;
6002 return MAX (factor, target_align);
6005 /* Expands variable VAR. */
6008 expand_var (tree var)
6010 if (DECL_EXTERNAL (var))
6013 if (TREE_STATIC (var))
6014 /* If this is an inlined copy of a static local variable,
6015 look up the original decl. */
6016 var = DECL_ORIGIN (var);
6018 if (TREE_STATIC (var)
6019 ? !TREE_ASM_WRITTEN (var)
6020 : !DECL_RTL_SET_P (var))
6022 if (TREE_CODE (var) == VAR_DECL && DECL_DEFER_OUTPUT (var))
6024 /* Prepare a mem & address for the decl. */
6027 if (TREE_STATIC (var))
6030 x = gen_rtx_MEM (DECL_MODE (var),
6031 gen_reg_rtx (Pmode));
6033 set_mem_attributes (x, var, 1);
6034 SET_DECL_RTL (var, x);
6036 else if (lang_hooks.expand_decl (var))
6038 else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
6040 else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
6041 rest_of_decl_compilation (var, 0, 0);
6042 else if (TREE_CODE (var) == TYPE_DECL
6043 || TREE_CODE (var) == CONST_DECL
6044 || TREE_CODE (var) == FUNCTION_DECL
6045 || TREE_CODE (var) == LABEL_DECL)
6046 /* No expansion needed. */;
6052 /* Subroutine of expand_expr. Expand the two operands of a binary
6053 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6054 The value may be stored in TARGET if TARGET is nonzero. The
6055 MODIFIER argument is as documented by expand_expr. */
6058 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6059 enum expand_modifier modifier)
6061 if (! safe_from_p (target, exp1, 1))
6063 if (operand_equal_p (exp0, exp1, 0))
6065 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6066 *op1 = copy_rtx (*op0);
6070 /* If we need to preserve evaluation order, copy exp0 into its own
6071 temporary variable so that it can't be clobbered by exp1. */
6072 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6073 exp0 = save_expr (exp0);
6074 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6075 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6080 /* expand_expr: generate code for computing expression EXP.
6081 An rtx for the computed value is returned. The value is never null.
6082 In the case of a void EXP, const0_rtx is returned.
6084 The value may be stored in TARGET if TARGET is nonzero.
6085 TARGET is just a suggestion; callers must assume that
6086 the rtx returned may not be the same as TARGET.
6088 If TARGET is CONST0_RTX, it means that the value will be ignored.
6090 If TMODE is not VOIDmode, it suggests generating the
6091 result in mode TMODE. But this is done only when convenient.
6092 Otherwise, TMODE is ignored and the value generated in its natural mode.
6093 TMODE is just a suggestion; callers must assume that
6094 the rtx returned may not have mode TMODE.
6096 Note that TARGET may have neither TMODE nor MODE. In that case, it
6097 probably will not be used.
6099 If MODIFIER is EXPAND_SUM then when EXP is an addition
6100 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6101 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6102 products as above, or REG or MEM, or constant.
6103 Ordinarily in such cases we would output mul or add instructions
6104 and then return a pseudo reg containing the sum.
6106 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6107 it also marks a label as absolutely required (it can't be dead).
6108 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6109 This is used for outputting expressions used in initializers.
6111 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6112 with a constant address even if that address is not normally legitimate.
6113 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6115 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6116 a call parameter. Such targets require special care as we haven't yet
6117 marked TARGET so that it's safe from being trashed by libcalls. We
6118 don't want to use TARGET for anything but the final result;
6119 Intermediate values must go elsewhere. Additionally, calls to
6120 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6122 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6123 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6124 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
6125 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
6128 static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
6129 enum expand_modifier, rtx *);
6132 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6133 enum expand_modifier modifier, rtx *alt_rtl)
6136 rtx ret, last = NULL;
6138 /* Handle ERROR_MARK before anybody tries to access its type. */
6139 if (TREE_CODE (exp) == ERROR_MARK
6140 || TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK)
6142 ret = CONST0_RTX (tmode);
6143 return ret ? ret : const0_rtx;
6146 if (flag_non_call_exceptions)
6148 rn = lookup_stmt_eh_region (exp);
6149 /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw. */
6151 last = get_last_insn ();
6154 /* If this is an expression of some kind and it has an associated line
6155 number, then emit the line number before expanding the expression.
6157 We need to save and restore the file and line information so that
6158 errors discovered during expansion are emitted with the right
6159 information. It would be better of the diagnostic routines
6160 used the file/line information embedded in the tree nodes rather
6162 if (cfun && EXPR_HAS_LOCATION (exp))
6164 location_t saved_location = input_location;
6165 input_location = EXPR_LOCATION (exp);
6166 emit_line_note (input_location);
6168 /* Record where the insns produced belong. */
6169 record_block_change (TREE_BLOCK (exp));
6171 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6173 input_location = saved_location;
6177 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6180 /* If using non-call exceptions, mark all insns that may trap.
6181 expand_call() will mark CALL_INSNs before we get to this code,
6182 but it doesn't handle libcalls, and these may trap. */
6186 for (insn = next_real_insn (last); insn;
6187 insn = next_real_insn (insn))
6189 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
6190 /* If we want exceptions for non-call insns, any
6191 may_trap_p instruction may throw. */
6192 && GET_CODE (PATTERN (insn)) != CLOBBER
6193 && GET_CODE (PATTERN (insn)) != USE
6194 && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
6196 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
6206 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
6207 enum expand_modifier modifier, rtx *alt_rtl)
6210 tree type = TREE_TYPE (exp);
6212 enum machine_mode mode;
6213 enum tree_code code = TREE_CODE (exp);
6215 rtx subtarget, original_target;
6218 bool reduce_bit_field = false;
6219 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field && !ignore \
6220 ? reduce_to_bit_field_precision ((expr), \
6225 mode = TYPE_MODE (type);
6226 unsignedp = TYPE_UNSIGNED (type);
6227 if (lang_hooks.reduce_bit_field_operations
6228 && TREE_CODE (type) == INTEGER_TYPE
6229 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type))
6231 /* An operation in what may be a bit-field type needs the
6232 result to be reduced to the precision of the bit-field type,
6233 which is narrower than that of the type's mode. */
6234 reduce_bit_field = true;
6235 if (modifier == EXPAND_STACK_PARM)
6239 /* Use subtarget as the target for operand 0 of a binary operation. */
6240 subtarget = get_subtarget (target);
6241 original_target = target;
6242 ignore = (target == const0_rtx
6243 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6244 || code == CONVERT_EXPR || code == COND_EXPR
6245 || code == VIEW_CONVERT_EXPR)
6246 && TREE_CODE (type) == VOID_TYPE));
6248 /* If we are going to ignore this result, we need only do something
6249 if there is a side-effect somewhere in the expression. If there
6250 is, short-circuit the most common cases here. Note that we must
6251 not call expand_expr with anything but const0_rtx in case this
6252 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6256 if (! TREE_SIDE_EFFECTS (exp))
6259 /* Ensure we reference a volatile object even if value is ignored, but
6260 don't do this if all we are doing is taking its address. */
6261 if (TREE_THIS_VOLATILE (exp)
6262 && TREE_CODE (exp) != FUNCTION_DECL
6263 && mode != VOIDmode && mode != BLKmode
6264 && modifier != EXPAND_CONST_ADDRESS)
6266 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6268 temp = copy_to_reg (temp);
6272 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6273 || code == INDIRECT_REF)
6274 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6277 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6278 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6280 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6281 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6284 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6285 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6286 /* If the second operand has no side effects, just evaluate
6288 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6290 else if (code == BIT_FIELD_REF)
6292 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6293 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6294 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6301 /* If will do cse, generate all results into pseudo registers
6302 since 1) that allows cse to find more things
6303 and 2) otherwise cse could produce an insn the machine
6304 cannot support. An exception is a CONSTRUCTOR into a multi-word
6305 MEM: that's much more likely to be most efficient into the MEM.
6306 Another is a CALL_EXPR which must return in memory. */
6308 if (! cse_not_expected && mode != BLKmode && target
6309 && (!REG_P (target) || REGNO (target) < FIRST_PSEUDO_REGISTER)
6310 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
6311 && ! (code == CALL_EXPR && aggregate_value_p (exp, exp)))
6318 tree function = decl_function_context (exp);
6320 temp = label_rtx (exp);
6321 temp = gen_rtx_LABEL_REF (Pmode, temp);
6323 if (function != current_function_decl
6325 LABEL_REF_NONLOCAL_P (temp) = 1;
6327 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
6333 /* If a static var's type was incomplete when the decl was written,
6334 but the type is complete now, lay out the decl now. */
6335 if (DECL_SIZE (exp) == 0
6336 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6337 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6338 layout_decl (exp, 0);
6340 /* ... fall through ... */
6344 if (DECL_RTL (exp) == 0)
6347 /* Ensure variable marked as used even if it doesn't go through
6348 a parser. If it hasn't be used yet, write out an external
6350 if (! TREE_USED (exp))
6352 assemble_external (exp);
6353 TREE_USED (exp) = 1;
6356 /* Show we haven't gotten RTL for this yet. */
6359 /* Variables inherited from containing functions should have
6360 been lowered by this point. */
6361 context = decl_function_context (exp);
6363 && context != current_function_decl
6364 && !TREE_STATIC (exp)
6365 /* ??? C++ creates functions that are not TREE_STATIC. */
6366 && TREE_CODE (exp) != FUNCTION_DECL)
6369 /* This is the case of an array whose size is to be determined
6370 from its initializer, while the initializer is still being parsed.
6373 else if (MEM_P (DECL_RTL (exp))
6374 && REG_P (XEXP (DECL_RTL (exp), 0)))
6375 temp = validize_mem (DECL_RTL (exp));
6377 /* If DECL_RTL is memory, we are in the normal case and either
6378 the address is not valid or it is not a register and -fforce-addr
6379 is specified, get the address into a register. */
6381 else if (MEM_P (DECL_RTL (exp))
6382 && modifier != EXPAND_CONST_ADDRESS
6383 && modifier != EXPAND_SUM
6384 && modifier != EXPAND_INITIALIZER
6385 && (! memory_address_p (DECL_MODE (exp),
6386 XEXP (DECL_RTL (exp), 0))
6388 && !REG_P (XEXP (DECL_RTL (exp), 0)))))
6391 *alt_rtl = DECL_RTL (exp);
6392 temp = replace_equiv_address (DECL_RTL (exp),
6393 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6396 /* If we got something, return it. But first, set the alignment
6397 if the address is a register. */
6400 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
6401 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6406 /* If the mode of DECL_RTL does not match that of the decl, it
6407 must be a promoted value. We return a SUBREG of the wanted mode,
6408 but mark it so that we know that it was already extended. */
6410 if (REG_P (DECL_RTL (exp))
6411 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6413 /* Get the signedness used for this variable. Ensure we get the
6414 same mode we got when the variable was declared. */
6415 if (GET_MODE (DECL_RTL (exp))
6416 != promote_mode (type, DECL_MODE (exp), &unsignedp,
6417 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6420 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6421 SUBREG_PROMOTED_VAR_P (temp) = 1;
6422 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6426 return DECL_RTL (exp);
6429 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6430 TREE_INT_CST_HIGH (exp), mode);
6432 /* ??? If overflow is set, fold will have done an incomplete job,
6433 which can result in (plus xx (const_int 0)), which can get
6434 simplified by validate_replace_rtx during virtual register
6435 instantiation, which can result in unrecognizable insns.
6436 Avoid this by forcing all overflows into registers. */
6437 if (TREE_CONSTANT_OVERFLOW (exp)
6438 && modifier != EXPAND_INITIALIZER)
6439 temp = force_reg (mode, temp);
6444 if (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_INT
6445 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_FLOAT)
6446 return const_vector_from_tree (exp);
6448 return expand_expr (build1 (CONSTRUCTOR, TREE_TYPE (exp),
6449 TREE_VECTOR_CST_ELTS (exp)),
6450 ignore ? const0_rtx : target, tmode, modifier);
6453 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6456 /* If optimized, generate immediate CONST_DOUBLE
6457 which will be turned into memory by reload if necessary.
6459 We used to force a register so that loop.c could see it. But
6460 this does not allow gen_* patterns to perform optimizations with
6461 the constants. It also produces two insns in cases like "x = 1.0;".
6462 On most machines, floating-point constants are not permitted in
6463 many insns, so we'd end up copying it to a register in any case.
6465 Now, we do the copying in expand_binop, if appropriate. */
6466 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6467 TYPE_MODE (TREE_TYPE (exp)));
6470 /* Handle evaluating a complex constant in a CONCAT target. */
6471 if (original_target && GET_CODE (original_target) == CONCAT)
6473 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6476 rtarg = XEXP (original_target, 0);
6477 itarg = XEXP (original_target, 1);
6479 /* Move the real and imaginary parts separately. */
6480 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
6481 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
6484 emit_move_insn (rtarg, op0);
6486 emit_move_insn (itarg, op1);
6488 return original_target;
6491 /* ... fall through ... */
6494 temp = output_constant_def (exp, 1);
6496 /* temp contains a constant address.
6497 On RISC machines where a constant address isn't valid,
6498 make some insns to get that address into a register. */
6499 if (modifier != EXPAND_CONST_ADDRESS
6500 && modifier != EXPAND_INITIALIZER
6501 && modifier != EXPAND_SUM
6502 && (! memory_address_p (mode, XEXP (temp, 0))
6503 || flag_force_addr))
6504 return replace_equiv_address (temp,
6505 copy_rtx (XEXP (temp, 0)));
6510 tree val = TREE_OPERAND (exp, 0);
6511 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
6513 if (TREE_CODE (val) != VAR_DECL || !DECL_ARTIFICIAL (val))
6515 /* We can indeed still hit this case, typically via builtin
6516 expanders calling save_expr immediately before expanding
6517 something. Assume this means that we only have to deal
6518 with non-BLKmode values. */
6519 if (GET_MODE (ret) == BLKmode)
6522 val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
6523 DECL_ARTIFICIAL (val) = 1;
6524 TREE_OPERAND (exp, 0) = val;
6526 if (!CONSTANT_P (ret))
6527 ret = copy_to_reg (ret);
6528 SET_DECL_RTL (val, ret);
6535 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6536 expand_goto (TREE_OPERAND (exp, 0));
6538 expand_computed_goto (TREE_OPERAND (exp, 0));
6541 /* These are lowered during gimplification, so we should never ever
6547 case LABELED_BLOCK_EXPR:
6548 if (LABELED_BLOCK_BODY (exp))
6549 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6550 /* Should perhaps use expand_label, but this is simpler and safer. */
6551 do_pending_stack_adjust ();
6552 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6555 case EXIT_BLOCK_EXPR:
6556 if (EXIT_BLOCK_RETURN (exp))
6557 sorry ("returned value in block_exit_expr");
6558 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6562 /* If we don't need the result, just ensure we evaluate any
6568 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6569 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6574 /* All elts simple constants => refer to a constant in memory. But
6575 if this is a non-BLKmode mode, let it store a field at a time
6576 since that should make a CONST_INT or CONST_DOUBLE when we
6577 fold. Likewise, if we have a target we can use, it is best to
6578 store directly into the target unless the type is large enough
6579 that memcpy will be used. If we are making an initializer and
6580 all operands are constant, put it in memory as well.
6582 FIXME: Avoid trying to fill vector constructors piece-meal.
6583 Output them with output_constant_def below unless we're sure
6584 they're zeros. This should go away when vector initializers
6585 are treated like VECTOR_CST instead of arrays.
6587 else if ((TREE_STATIC (exp)
6588 && ((mode == BLKmode
6589 && ! (target != 0 && safe_from_p (target, exp, 1)))
6590 || TREE_ADDRESSABLE (exp)
6591 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6592 && (! MOVE_BY_PIECES_P
6593 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6595 && ! mostly_zeros_p (exp))))
6596 || ((modifier == EXPAND_INITIALIZER
6597 || modifier == EXPAND_CONST_ADDRESS)
6598 && TREE_CONSTANT (exp)))
6600 rtx constructor = output_constant_def (exp, 1);
6602 if (modifier != EXPAND_CONST_ADDRESS
6603 && modifier != EXPAND_INITIALIZER
6604 && modifier != EXPAND_SUM)
6605 constructor = validize_mem (constructor);
6611 /* Handle calls that pass values in multiple non-contiguous
6612 locations. The Irix 6 ABI has examples of this. */
6613 if (target == 0 || ! safe_from_p (target, exp, 1)
6614 || GET_CODE (target) == PARALLEL
6615 || modifier == EXPAND_STACK_PARM)
6617 = assign_temp (build_qualified_type (type,
6619 | (TREE_READONLY (exp)
6620 * TYPE_QUAL_CONST))),
6621 0, TREE_ADDRESSABLE (exp), 1);
6623 store_constructor (exp, target, 0, int_expr_size (exp));
6629 tree exp1 = TREE_OPERAND (exp, 0);
6631 if (modifier != EXPAND_WRITE)
6635 t = fold_read_from_constant_string (exp);
6637 return expand_expr (t, target, tmode, modifier);
6640 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6641 op0 = memory_address (mode, op0);
6642 temp = gen_rtx_MEM (mode, op0);
6643 set_mem_attributes (temp, exp, 0);
6645 /* If we are writing to this object and its type is a record with
6646 readonly fields, we must mark it as readonly so it will
6647 conflict with readonly references to those fields. */
6648 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
6649 RTX_UNCHANGING_P (temp) = 1;
6656 #ifdef ENABLE_CHECKING
6657 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6662 tree array = TREE_OPERAND (exp, 0);
6663 tree low_bound = array_ref_low_bound (exp);
6664 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6667 /* Optimize the special-case of a zero lower bound.
6669 We convert the low_bound to sizetype to avoid some problems
6670 with constant folding. (E.g. suppose the lower bound is 1,
6671 and its mode is QI. Without the conversion, (ARRAY
6672 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6673 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6675 if (! integer_zerop (low_bound))
6676 index = size_diffop (index, convert (sizetype, low_bound));
6678 /* Fold an expression like: "foo"[2].
6679 This is not done in fold so it won't happen inside &.
6680 Don't fold if this is for wide characters since it's too
6681 difficult to do correctly and this is a very rare case. */
6683 if (modifier != EXPAND_CONST_ADDRESS
6684 && modifier != EXPAND_INITIALIZER
6685 && modifier != EXPAND_MEMORY)
6687 tree t = fold_read_from_constant_string (exp);
6690 return expand_expr (t, target, tmode, modifier);
6693 /* If this is a constant index into a constant array,
6694 just get the value from the array. Handle both the cases when
6695 we have an explicit constructor and when our operand is a variable
6696 that was declared const. */
6698 if (modifier != EXPAND_CONST_ADDRESS
6699 && modifier != EXPAND_INITIALIZER
6700 && modifier != EXPAND_MEMORY
6701 && TREE_CODE (array) == CONSTRUCTOR
6702 && ! TREE_SIDE_EFFECTS (array)
6703 && TREE_CODE (index) == INTEGER_CST
6704 && 0 > compare_tree_int (index,
6705 list_length (CONSTRUCTOR_ELTS
6706 (TREE_OPERAND (exp, 0)))))
6710 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6711 i = TREE_INT_CST_LOW (index);
6712 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6716 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
6720 else if (optimize >= 1
6721 && modifier != EXPAND_CONST_ADDRESS
6722 && modifier != EXPAND_INITIALIZER
6723 && modifier != EXPAND_MEMORY
6724 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6725 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6726 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
6727 && targetm.binds_local_p (array))
6729 if (TREE_CODE (index) == INTEGER_CST)
6731 tree init = DECL_INITIAL (array);
6733 if (TREE_CODE (init) == CONSTRUCTOR)
6737 for (elem = CONSTRUCTOR_ELTS (init);
6739 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6740 elem = TREE_CHAIN (elem))
6743 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6744 return expand_expr (fold (TREE_VALUE (elem)), target,
6747 else if (TREE_CODE (init) == STRING_CST
6748 && 0 > compare_tree_int (index,
6749 TREE_STRING_LENGTH (init)))
6751 tree type = TREE_TYPE (TREE_TYPE (init));
6752 enum machine_mode mode = TYPE_MODE (type);
6754 if (GET_MODE_CLASS (mode) == MODE_INT
6755 && GET_MODE_SIZE (mode) == 1)
6756 return gen_int_mode (TREE_STRING_POINTER (init)
6757 [TREE_INT_CST_LOW (index)], mode);
6762 goto normal_inner_ref;
6765 /* If the operand is a CONSTRUCTOR, we can just extract the
6766 appropriate field if it is present. */
6767 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
6771 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6772 elt = TREE_CHAIN (elt))
6773 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6774 /* We can normally use the value of the field in the
6775 CONSTRUCTOR. However, if this is a bitfield in
6776 an integral mode that we can fit in a HOST_WIDE_INT,
6777 we must mask only the number of bits in the bitfield,
6778 since this is done implicitly by the constructor. If
6779 the bitfield does not meet either of those conditions,
6780 we can't do this optimization. */
6781 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6782 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6784 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6785 <= HOST_BITS_PER_WIDE_INT))))
6787 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
6788 && modifier == EXPAND_STACK_PARM)
6790 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6791 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6793 HOST_WIDE_INT bitsize
6794 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6795 enum machine_mode imode
6796 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6798 if (TYPE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6800 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6801 op0 = expand_and (imode, op0, op1, target);
6806 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6809 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6811 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6819 goto normal_inner_ref;
6822 case ARRAY_RANGE_REF:
6825 enum machine_mode mode1;
6826 HOST_WIDE_INT bitsize, bitpos;
6829 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6830 &mode1, &unsignedp, &volatilep);
6833 /* If we got back the original object, something is wrong. Perhaps
6834 we are evaluating an expression too early. In any event, don't
6835 infinitely recurse. */
6839 /* If TEM's type is a union of variable size, pass TARGET to the inner
6840 computation, since it will need a temporary and TARGET is known
6841 to have to do. This occurs in unchecked conversion in Ada. */
6845 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6846 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6848 && modifier != EXPAND_STACK_PARM
6849 ? target : NULL_RTX),
6851 (modifier == EXPAND_INITIALIZER
6852 || modifier == EXPAND_CONST_ADDRESS
6853 || modifier == EXPAND_STACK_PARM)
6854 ? modifier : EXPAND_NORMAL);
6856 /* If this is a constant, put it into a register if it is a
6857 legitimate constant and OFFSET is 0 and memory if it isn't. */
6858 if (CONSTANT_P (op0))
6860 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6861 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6863 op0 = force_reg (mode, op0);
6865 op0 = validize_mem (force_const_mem (mode, op0));
6868 /* Otherwise, if this object not in memory and we either have an
6869 offset or a BLKmode result, put it there. This case can't occur in
6870 C, but can in Ada if we have unchecked conversion of an expression
6871 from a scalar type to an array or record type or for an
6872 ARRAY_RANGE_REF whose type is BLKmode. */
6873 else if (!MEM_P (op0)
6875 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
6877 tree nt = build_qualified_type (TREE_TYPE (tem),
6878 (TYPE_QUALS (TREE_TYPE (tem))
6879 | TYPE_QUAL_CONST));
6880 rtx memloc = assign_temp (nt, 1, 1, 1);
6882 emit_move_insn (memloc, op0);
6888 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
6894 #ifdef POINTERS_EXTEND_UNSIGNED
6895 if (GET_MODE (offset_rtx) != Pmode)
6896 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
6898 if (GET_MODE (offset_rtx) != ptr_mode)
6899 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6902 if (GET_MODE (op0) == BLKmode
6903 /* A constant address in OP0 can have VOIDmode, we must
6904 not try to call force_reg in that case. */
6905 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6907 && (bitpos % bitsize) == 0
6908 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6909 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
6911 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
6915 op0 = offset_address (op0, offset_rtx,
6916 highest_pow2_factor (offset));
6919 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
6920 record its alignment as BIGGEST_ALIGNMENT. */
6921 if (MEM_P (op0) && bitpos == 0 && offset != 0
6922 && is_aligning_offset (offset, tem))
6923 set_mem_align (op0, BIGGEST_ALIGNMENT);
6925 /* Don't forget about volatility even if this is a bitfield. */
6926 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
6928 if (op0 == orig_op0)
6929 op0 = copy_rtx (op0);
6931 MEM_VOLATILE_P (op0) = 1;
6934 /* The following code doesn't handle CONCAT.
6935 Assume only bitpos == 0 can be used for CONCAT, due to
6936 one element arrays having the same mode as its element. */
6937 if (GET_CODE (op0) == CONCAT)
6939 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
6944 /* In cases where an aligned union has an unaligned object
6945 as a field, we might be extracting a BLKmode value from
6946 an integer-mode (e.g., SImode) object. Handle this case
6947 by doing the extract into an object as wide as the field
6948 (which we know to be the width of a basic mode), then
6949 storing into memory, and changing the mode to BLKmode. */
6950 if (mode1 == VOIDmode
6951 || REG_P (op0) || GET_CODE (op0) == SUBREG
6952 || (mode1 != BLKmode && ! direct_load[(int) mode1]
6953 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6954 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
6955 && modifier != EXPAND_CONST_ADDRESS
6956 && modifier != EXPAND_INITIALIZER)
6957 /* If the field isn't aligned enough to fetch as a memref,
6958 fetch it as a bit field. */
6959 || (mode1 != BLKmode
6960 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
6961 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
6963 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
6964 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
6965 && ((modifier == EXPAND_CONST_ADDRESS
6966 || modifier == EXPAND_INITIALIZER)
6968 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
6969 || (bitpos % BITS_PER_UNIT != 0)))
6970 /* If the type and the field are a constant size and the
6971 size of the type isn't the same size as the bitfield,
6972 we must use bitfield operations. */
6974 && TYPE_SIZE (TREE_TYPE (exp))
6975 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
6976 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
6979 enum machine_mode ext_mode = mode;
6981 if (ext_mode == BLKmode
6982 && ! (target != 0 && MEM_P (op0)
6984 && bitpos % BITS_PER_UNIT == 0))
6985 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
6987 if (ext_mode == BLKmode)
6990 target = assign_temp (type, 0, 1, 1);
6995 /* In this case, BITPOS must start at a byte boundary and
6996 TARGET, if specified, must be a MEM. */
6998 || (target != 0 && !MEM_P (target))
6999 || bitpos % BITS_PER_UNIT != 0)
7002 emit_block_move (target,
7003 adjust_address (op0, VOIDmode,
7004 bitpos / BITS_PER_UNIT),
7005 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7007 (modifier == EXPAND_STACK_PARM
7008 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7013 op0 = validize_mem (op0);
7015 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
7016 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7018 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7019 (modifier == EXPAND_STACK_PARM
7020 ? NULL_RTX : target),
7021 ext_mode, ext_mode);
7023 /* If the result is a record type and BITSIZE is narrower than
7024 the mode of OP0, an integral mode, and this is a big endian
7025 machine, we must put the field into the high-order bits. */
7026 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7027 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7028 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7029 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7030 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7034 /* If the result type is BLKmode, store the data into a temporary
7035 of the appropriate type, but with the mode corresponding to the
7036 mode for the data we have (op0's mode). It's tempting to make
7037 this a constant type, since we know it's only being stored once,
7038 but that can cause problems if we are taking the address of this
7039 COMPONENT_REF because the MEM of any reference via that address
7040 will have flags corresponding to the type, which will not
7041 necessarily be constant. */
7042 if (mode == BLKmode)
7045 = assign_stack_temp_for_type
7046 (ext_mode, GET_MODE_BITSIZE (ext_mode), 0, type);
7048 emit_move_insn (new, op0);
7049 op0 = copy_rtx (new);
7050 PUT_MODE (op0, BLKmode);
7051 set_mem_attributes (op0, exp, 1);
7057 /* If the result is BLKmode, use that to access the object
7059 if (mode == BLKmode)
7062 /* Get a reference to just this component. */
7063 if (modifier == EXPAND_CONST_ADDRESS
7064 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7065 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7067 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7069 if (op0 == orig_op0)
7070 op0 = copy_rtx (op0);
7072 set_mem_attributes (op0, exp, 0);
7073 if (REG_P (XEXP (op0, 0)))
7074 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7076 MEM_VOLATILE_P (op0) |= volatilep;
7077 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7078 || modifier == EXPAND_CONST_ADDRESS
7079 || modifier == EXPAND_INITIALIZER)
7081 else if (target == 0)
7082 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7084 convert_move (target, op0, unsignedp);
7089 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
7092 /* Check for a built-in function. */
7093 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7094 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7096 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7098 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7099 == BUILT_IN_FRONTEND)
7100 return lang_hooks.expand_expr (exp, original_target,
7104 return expand_builtin (exp, target, subtarget, tmode, ignore);
7107 return expand_call (exp, target, ignore);
7109 case NON_LVALUE_EXPR:
7112 if (TREE_OPERAND (exp, 0) == error_mark_node)
7115 if (TREE_CODE (type) == UNION_TYPE)
7117 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7119 /* If both input and output are BLKmode, this conversion isn't doing
7120 anything except possibly changing memory attribute. */
7121 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7123 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7126 result = copy_rtx (result);
7127 set_mem_attributes (result, exp, 0);
7133 if (TYPE_MODE (type) != BLKmode)
7134 target = gen_reg_rtx (TYPE_MODE (type));
7136 target = assign_temp (type, 0, 1, 1);
7140 /* Store data into beginning of memory target. */
7141 store_expr (TREE_OPERAND (exp, 0),
7142 adjust_address (target, TYPE_MODE (valtype), 0),
7143 modifier == EXPAND_STACK_PARM ? 2 : 0);
7145 else if (REG_P (target))
7146 /* Store this field into a union of the proper type. */
7147 store_field (target,
7148 MIN ((int_size_in_bytes (TREE_TYPE
7149 (TREE_OPERAND (exp, 0)))
7151 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7152 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7153 VOIDmode, 0, type, 0);
7157 /* Return the entire union. */
7161 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7163 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7166 /* If the signedness of the conversion differs and OP0 is
7167 a promoted SUBREG, clear that indication since we now
7168 have to do the proper extension. */
7169 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7170 && GET_CODE (op0) == SUBREG)
7171 SUBREG_PROMOTED_VAR_P (op0) = 0;
7173 return REDUCE_BIT_FIELD (op0);
7176 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7177 op0 = REDUCE_BIT_FIELD (op0);
7178 if (GET_MODE (op0) == mode)
7181 /* If OP0 is a constant, just convert it into the proper mode. */
7182 if (CONSTANT_P (op0))
7184 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7185 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7187 if (modifier == EXPAND_INITIALIZER)
7188 return simplify_gen_subreg (mode, op0, inner_mode,
7189 subreg_lowpart_offset (mode,
7192 return convert_modes (mode, inner_mode, op0,
7193 TYPE_UNSIGNED (inner_type));
7196 if (modifier == EXPAND_INITIALIZER)
7197 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7201 convert_to_mode (mode, op0,
7202 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7204 convert_move (target, op0,
7205 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7208 case VIEW_CONVERT_EXPR:
7209 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7211 /* If the input and output modes are both the same, we are done.
7212 Otherwise, if neither mode is BLKmode and both are integral and within
7213 a word, we can use gen_lowpart. If neither is true, make sure the
7214 operand is in memory and convert the MEM to the new mode. */
7215 if (TYPE_MODE (type) == GET_MODE (op0))
7217 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7218 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7219 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
7220 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7221 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7222 op0 = gen_lowpart (TYPE_MODE (type), op0);
7223 else if (!MEM_P (op0))
7225 /* If the operand is not a MEM, force it into memory. Since we
7226 are going to be be changing the mode of the MEM, don't call
7227 force_const_mem for constants because we don't allow pool
7228 constants to change mode. */
7229 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7231 if (TREE_ADDRESSABLE (exp))
7234 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7236 = assign_stack_temp_for_type
7237 (TYPE_MODE (inner_type),
7238 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7240 emit_move_insn (target, op0);
7244 /* At this point, OP0 is in the correct mode. If the output type is such
7245 that the operand is known to be aligned, indicate that it is.
7246 Otherwise, we need only be concerned about alignment for non-BLKmode
7250 op0 = copy_rtx (op0);
7252 if (TYPE_ALIGN_OK (type))
7253 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7254 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7255 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7257 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7258 HOST_WIDE_INT temp_size
7259 = MAX (int_size_in_bytes (inner_type),
7260 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7261 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7262 temp_size, 0, type);
7263 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7265 if (TREE_ADDRESSABLE (exp))
7268 if (GET_MODE (op0) == BLKmode)
7269 emit_block_move (new_with_op0_mode, op0,
7270 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7271 (modifier == EXPAND_STACK_PARM
7272 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7274 emit_move_insn (new_with_op0_mode, op0);
7279 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7285 this_optab = ! unsignedp && flag_trapv
7286 && (GET_MODE_CLASS (mode) == MODE_INT)
7287 ? addv_optab : add_optab;
7289 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
7290 something else, make sure we add the register to the constant and
7291 then to the other thing. This case can occur during strength
7292 reduction and doing it this way will produce better code if the
7293 frame pointer or argument pointer is eliminated.
7295 fold-const.c will ensure that the constant is always in the inner
7296 PLUS_EXPR, so the only case we need to do anything about is if
7297 sp, ap, or fp is our second argument, in which case we must swap
7298 the innermost first argument and our second argument. */
7300 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7301 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7302 && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
7303 && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7304 || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7305 || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7307 tree t = TREE_OPERAND (exp, 1);
7309 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7310 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7313 /* If the result is to be ptr_mode and we are adding an integer to
7314 something, we might be forming a constant. So try to use
7315 plus_constant. If it produces a sum and we can't accept it,
7316 use force_operand. This allows P = &ARR[const] to generate
7317 efficient code on machines where a SYMBOL_REF is not a valid
7320 If this is an EXPAND_SUM call, always return the sum. */
7321 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7322 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7324 if (modifier == EXPAND_STACK_PARM)
7326 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7327 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7328 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7332 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7334 /* Use immed_double_const to ensure that the constant is
7335 truncated according to the mode of OP1, then sign extended
7336 to a HOST_WIDE_INT. Using the constant directly can result
7337 in non-canonical RTL in a 64x32 cross compile. */
7339 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7341 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7342 op1 = plus_constant (op1, INTVAL (constant_part));
7343 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7344 op1 = force_operand (op1, target);
7345 return REDUCE_BIT_FIELD (op1);
7348 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7349 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7350 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7354 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7355 (modifier == EXPAND_INITIALIZER
7356 ? EXPAND_INITIALIZER : EXPAND_SUM));
7357 if (! CONSTANT_P (op0))
7359 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7360 VOIDmode, modifier);
7361 /* Return a PLUS if modifier says it's OK. */
7362 if (modifier == EXPAND_SUM
7363 || modifier == EXPAND_INITIALIZER)
7364 return simplify_gen_binary (PLUS, mode, op0, op1);
7367 /* Use immed_double_const to ensure that the constant is
7368 truncated according to the mode of OP1, then sign extended
7369 to a HOST_WIDE_INT. Using the constant directly can result
7370 in non-canonical RTL in a 64x32 cross compile. */
7372 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7374 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7375 op0 = plus_constant (op0, INTVAL (constant_part));
7376 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7377 op0 = force_operand (op0, target);
7378 return REDUCE_BIT_FIELD (op0);
7382 /* No sense saving up arithmetic to be done
7383 if it's all in the wrong mode to form part of an address.
7384 And force_operand won't know whether to sign-extend or
7386 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7387 || mode != ptr_mode)
7389 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7390 subtarget, &op0, &op1, 0);
7391 if (op0 == const0_rtx)
7393 if (op1 == const0_rtx)
7398 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7399 subtarget, &op0, &op1, modifier);
7400 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7403 /* For initializers, we are allowed to return a MINUS of two
7404 symbolic constants. Here we handle all cases when both operands
7406 /* Handle difference of two symbolic constants,
7407 for the sake of an initializer. */
7408 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7409 && really_constant_p (TREE_OPERAND (exp, 0))
7410 && really_constant_p (TREE_OPERAND (exp, 1)))
7412 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7413 NULL_RTX, &op0, &op1, modifier);
7415 /* If the last operand is a CONST_INT, use plus_constant of
7416 the negated constant. Else make the MINUS. */
7417 if (GET_CODE (op1) == CONST_INT)
7418 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
7420 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
7423 this_optab = ! unsignedp && flag_trapv
7424 && (GET_MODE_CLASS(mode) == MODE_INT)
7425 ? subv_optab : sub_optab;
7427 /* No sense saving up arithmetic to be done
7428 if it's all in the wrong mode to form part of an address.
7429 And force_operand won't know whether to sign-extend or
7431 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7432 || mode != ptr_mode)
7435 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7436 subtarget, &op0, &op1, modifier);
7438 /* Convert A - const to A + (-const). */
7439 if (GET_CODE (op1) == CONST_INT)
7441 op1 = negate_rtx (mode, op1);
7442 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7448 /* If first operand is constant, swap them.
7449 Thus the following special case checks need only
7450 check the second operand. */
7451 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7453 tree t1 = TREE_OPERAND (exp, 0);
7454 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7455 TREE_OPERAND (exp, 1) = t1;
7458 /* Attempt to return something suitable for generating an
7459 indexed address, for machines that support that. */
7461 if (modifier == EXPAND_SUM && mode == ptr_mode
7462 && host_integerp (TREE_OPERAND (exp, 1), 0))
7464 tree exp1 = TREE_OPERAND (exp, 1);
7466 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7470 op0 = force_operand (op0, NULL_RTX);
7472 op0 = copy_to_mode_reg (mode, op0);
7474 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
7475 gen_int_mode (tree_low_cst (exp1, 0),
7476 TYPE_MODE (TREE_TYPE (exp1)))));
7479 if (modifier == EXPAND_STACK_PARM)
7482 /* Check for multiplying things that have been extended
7483 from a narrower type. If this machine supports multiplying
7484 in that narrower type with a result in the desired type,
7485 do it that way, and avoid the explicit type-conversion. */
7486 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7487 && TREE_CODE (type) == INTEGER_TYPE
7488 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7489 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7490 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7491 && int_fits_type_p (TREE_OPERAND (exp, 1),
7492 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7493 /* Don't use a widening multiply if a shift will do. */
7494 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7495 > HOST_BITS_PER_WIDE_INT)
7496 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7498 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7499 && (TYPE_PRECISION (TREE_TYPE
7500 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7501 == TYPE_PRECISION (TREE_TYPE
7503 (TREE_OPERAND (exp, 0), 0))))
7504 /* If both operands are extended, they must either both
7505 be zero-extended or both be sign-extended. */
7506 && (TYPE_UNSIGNED (TREE_TYPE
7507 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7508 == TYPE_UNSIGNED (TREE_TYPE
7510 (TREE_OPERAND (exp, 0), 0)))))))
7512 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
7513 enum machine_mode innermode = TYPE_MODE (op0type);
7514 bool zextend_p = TYPE_UNSIGNED (op0type);
7515 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
7516 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
7518 if (mode == GET_MODE_WIDER_MODE (innermode))
7520 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7522 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7523 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7524 TREE_OPERAND (exp, 1),
7525 NULL_RTX, &op0, &op1, 0);
7527 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7528 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7529 NULL_RTX, &op0, &op1, 0);
7532 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7533 && innermode == word_mode)
7536 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7537 NULL_RTX, VOIDmode, 0);
7538 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7539 op1 = convert_modes (innermode, mode,
7540 expand_expr (TREE_OPERAND (exp, 1),
7541 NULL_RTX, VOIDmode, 0),
7544 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7545 NULL_RTX, VOIDmode, 0);
7546 temp = expand_binop (mode, other_optab, op0, op1, target,
7547 unsignedp, OPTAB_LIB_WIDEN);
7548 hipart = gen_highpart (innermode, temp);
7549 htem = expand_mult_highpart_adjust (innermode, hipart,
7553 emit_move_insn (hipart, htem);
7554 return REDUCE_BIT_FIELD (temp);
7558 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7559 subtarget, &op0, &op1, 0);
7560 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
7562 case TRUNC_DIV_EXPR:
7563 case FLOOR_DIV_EXPR:
7565 case ROUND_DIV_EXPR:
7566 case EXACT_DIV_EXPR:
7567 if (modifier == EXPAND_STACK_PARM)
7569 /* Possible optimization: compute the dividend with EXPAND_SUM
7570 then if the divisor is constant can optimize the case
7571 where some terms of the dividend have coeffs divisible by it. */
7572 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7573 subtarget, &op0, &op1, 0);
7574 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7577 /* Emit a/b as a*(1/b). Later we may manage CSE the reciprocal saving
7578 expensive divide. If not, combine will rebuild the original
7580 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7581 && TREE_CODE (type) == REAL_TYPE
7582 && !real_onep (TREE_OPERAND (exp, 0)))
7583 return expand_expr (build2 (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7584 build2 (RDIV_EXPR, type,
7585 build_real (type, dconst1),
7586 TREE_OPERAND (exp, 1))),
7587 target, tmode, modifier);
7588 this_optab = sdiv_optab;
7591 case TRUNC_MOD_EXPR:
7592 case FLOOR_MOD_EXPR:
7594 case ROUND_MOD_EXPR:
7595 if (modifier == EXPAND_STACK_PARM)
7597 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7598 subtarget, &op0, &op1, 0);
7599 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7601 case FIX_ROUND_EXPR:
7602 case FIX_FLOOR_EXPR:
7604 abort (); /* Not used for C. */
7606 case FIX_TRUNC_EXPR:
7607 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7608 if (target == 0 || modifier == EXPAND_STACK_PARM)
7609 target = gen_reg_rtx (mode);
7610 expand_fix (target, op0, unsignedp);
7614 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7615 if (target == 0 || modifier == EXPAND_STACK_PARM)
7616 target = gen_reg_rtx (mode);
7617 /* expand_float can't figure out what to do if FROM has VOIDmode.
7618 So give it the correct mode. With -O, cse will optimize this. */
7619 if (GET_MODE (op0) == VOIDmode)
7620 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7622 expand_float (target, op0,
7623 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7627 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7628 if (modifier == EXPAND_STACK_PARM)
7630 temp = expand_unop (mode,
7631 ! unsignedp && flag_trapv
7632 && (GET_MODE_CLASS(mode) == MODE_INT)
7633 ? negv_optab : neg_optab, op0, target, 0);
7636 return REDUCE_BIT_FIELD (temp);
7639 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7640 if (modifier == EXPAND_STACK_PARM)
7643 /* ABS_EXPR is not valid for complex arguments. */
7644 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7645 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7648 /* Unsigned abs is simply the operand. Testing here means we don't
7649 risk generating incorrect code below. */
7650 if (TYPE_UNSIGNED (type))
7653 return expand_abs (mode, op0, target, unsignedp,
7654 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7658 target = original_target;
7660 || modifier == EXPAND_STACK_PARM
7661 || (MEM_P (target) && MEM_VOLATILE_P (target))
7662 || GET_MODE (target) != mode
7664 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7665 target = gen_reg_rtx (mode);
7666 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7667 target, &op0, &op1, 0);
7669 /* First try to do it with a special MIN or MAX instruction.
7670 If that does not win, use a conditional jump to select the proper
7672 this_optab = (unsignedp
7673 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7674 : (code == MIN_EXPR ? smin_optab : smax_optab));
7676 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7681 /* At this point, a MEM target is no longer useful; we will get better
7685 target = gen_reg_rtx (mode);
7687 /* If op1 was placed in target, swap op0 and op1. */
7688 if (target != op0 && target == op1)
7696 emit_move_insn (target, op0);
7698 op0 = gen_label_rtx ();
7700 /* If this mode is an integer too wide to compare properly,
7701 compare word by word. Rely on cse to optimize constant cases. */
7702 if (GET_MODE_CLASS (mode) == MODE_INT
7703 && ! can_compare_p (GE, mode, ccp_jump))
7705 if (code == MAX_EXPR)
7706 do_jump_by_parts_greater_rtx (mode, unsignedp, target, op1,
7709 do_jump_by_parts_greater_rtx (mode, unsignedp, op1, target,
7714 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7715 unsignedp, mode, NULL_RTX, NULL_RTX, op0);
7717 emit_move_insn (target, op1);
7722 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7723 if (modifier == EXPAND_STACK_PARM)
7725 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7730 /* ??? Can optimize bitwise operations with one arg constant.
7731 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7732 and (a bitwise1 b) bitwise2 b (etc)
7733 but that is probably not worth while. */
7735 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7736 boolean values when we want in all cases to compute both of them. In
7737 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7738 as actual zero-or-1 values and then bitwise anding. In cases where
7739 there cannot be any side effects, better code would be made by
7740 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7741 how to recognize those cases. */
7743 case TRUTH_AND_EXPR:
7745 this_optab = and_optab;
7750 this_optab = ior_optab;
7753 case TRUTH_XOR_EXPR:
7755 this_optab = xor_optab;
7762 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7764 if (modifier == EXPAND_STACK_PARM)
7766 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7767 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7770 /* Could determine the answer when only additive constants differ. Also,
7771 the addition of one can be handled by changing the condition. */
7778 case UNORDERED_EXPR:
7786 temp = do_store_flag (exp,
7787 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
7788 tmode != VOIDmode ? tmode : mode, 0);
7792 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7793 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7795 && REG_P (original_target)
7796 && (GET_MODE (original_target)
7797 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7799 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7802 /* If temp is constant, we can just compute the result. */
7803 if (GET_CODE (temp) == CONST_INT)
7805 if (INTVAL (temp) != 0)
7806 emit_move_insn (target, const1_rtx);
7808 emit_move_insn (target, const0_rtx);
7813 if (temp != original_target)
7815 enum machine_mode mode1 = GET_MODE (temp);
7816 if (mode1 == VOIDmode)
7817 mode1 = tmode != VOIDmode ? tmode : mode;
7819 temp = copy_to_mode_reg (mode1, temp);
7822 op1 = gen_label_rtx ();
7823 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7824 GET_MODE (temp), unsignedp, op1);
7825 emit_move_insn (temp, const1_rtx);
7830 /* If no set-flag instruction, must generate a conditional
7831 store into a temporary variable. Drop through
7832 and handle this like && and ||. */
7834 case TRUTH_ANDIF_EXPR:
7835 case TRUTH_ORIF_EXPR:
7838 || modifier == EXPAND_STACK_PARM
7839 || ! safe_from_p (target, exp, 1)
7840 /* Make sure we don't have a hard reg (such as function's return
7841 value) live across basic blocks, if not optimizing. */
7842 || (!optimize && REG_P (target)
7843 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7844 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7847 emit_clr_insn (target);
7849 op1 = gen_label_rtx ();
7850 jumpifnot (exp, op1);
7853 emit_0_to_1_insn (target);
7856 return ignore ? const0_rtx : target;
7858 case TRUTH_NOT_EXPR:
7859 if (modifier == EXPAND_STACK_PARM)
7861 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7862 /* The parser is careful to generate TRUTH_NOT_EXPR
7863 only with operands that are always zero or one. */
7864 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7865 target, 1, OPTAB_LIB_WIDEN);
7871 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7872 return expand_expr_real (TREE_OPERAND (exp, 1),
7873 (ignore ? const0_rtx : target),
7874 VOIDmode, modifier, alt_rtl);
7876 case STATEMENT_LIST:
7878 tree_stmt_iterator iter;
7883 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
7884 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
7889 /* If it's void, we don't need to worry about computing a value. */
7890 if (VOID_TYPE_P (TREE_TYPE (exp)))
7892 tree pred = TREE_OPERAND (exp, 0);
7893 tree then_ = TREE_OPERAND (exp, 1);
7894 tree else_ = TREE_OPERAND (exp, 2);
7896 if (TREE_CODE (then_) == GOTO_EXPR
7897 && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
7899 jumpif (pred, label_rtx (GOTO_DESTINATION (then_)));
7900 return expand_expr (else_, const0_rtx, VOIDmode, 0);
7902 else if (TREE_CODE (else_) == GOTO_EXPR
7903 && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
7905 jumpifnot (pred, label_rtx (GOTO_DESTINATION (else_)));
7906 return expand_expr (then_, const0_rtx, VOIDmode, 0);
7909 /* Just use the 'if' machinery. */
7910 expand_start_cond (pred, 0);
7911 expand_expr (then_, const0_rtx, VOIDmode, 0);
7915 /* Iterate over 'else if's instead of recursing. */
7916 for (; TREE_CODE (exp) == COND_EXPR; exp = TREE_OPERAND (exp, 2))
7918 expand_start_else ();
7919 if (EXPR_HAS_LOCATION (exp))
7921 emit_line_note (EXPR_LOCATION (exp));
7922 record_block_change (TREE_BLOCK (exp));
7924 expand_elseif (TREE_OPERAND (exp, 0));
7925 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, 0);
7927 /* Don't emit the jump and label if there's no 'else' clause. */
7928 if (TREE_SIDE_EFFECTS (exp))
7930 expand_start_else ();
7931 expand_expr (exp, const0_rtx, VOIDmode, 0);
7937 /* If we would have a "singleton" (see below) were it not for a
7938 conversion in each arm, bring that conversion back out. */
7939 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7940 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
7941 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
7942 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
7944 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
7945 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
7947 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
7948 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
7949 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
7950 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
7951 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
7952 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
7953 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
7954 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
7955 return expand_expr (build1 (NOP_EXPR, type,
7956 build3 (COND_EXPR, TREE_TYPE (iftrue),
7957 TREE_OPERAND (exp, 0),
7959 target, tmode, modifier);
7963 /* Note that COND_EXPRs whose type is a structure or union
7964 are required to be constructed to contain assignments of
7965 a temporary variable, so that we can evaluate them here
7966 for side effect only. If type is void, we must do likewise. */
7968 /* If an arm of the branch requires a cleanup,
7969 only that cleanup is performed. */
7972 tree binary_op = 0, unary_op = 0;
7974 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
7975 convert it to our mode, if necessary. */
7976 if (integer_onep (TREE_OPERAND (exp, 1))
7977 && integer_zerop (TREE_OPERAND (exp, 2))
7978 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7982 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
7987 if (modifier == EXPAND_STACK_PARM)
7989 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
7990 if (GET_MODE (op0) == mode)
7994 target = gen_reg_rtx (mode);
7995 convert_move (target, op0, unsignedp);
7999 /* Check for X ? A + B : A. If we have this, we can copy A to the
8000 output and conditionally add B. Similarly for unary operations.
8001 Don't do this if X has side-effects because those side effects
8002 might affect A or B and the "?" operation is a sequence point in
8003 ANSI. (operand_equal_p tests for side effects.) */
8005 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8006 && operand_equal_p (TREE_OPERAND (exp, 2),
8007 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8008 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8009 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8010 && operand_equal_p (TREE_OPERAND (exp, 1),
8011 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8012 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8013 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8014 && operand_equal_p (TREE_OPERAND (exp, 2),
8015 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8016 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8017 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8018 && operand_equal_p (TREE_OPERAND (exp, 1),
8019 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8020 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8022 /* If we are not to produce a result, we have no target. Otherwise,
8023 if a target was specified use it; it will not be used as an
8024 intermediate target unless it is safe. If no target, use a
8029 else if (modifier == EXPAND_STACK_PARM)
8030 temp = assign_temp (type, 0, 0, 1);
8031 else if (original_target
8032 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8033 || (singleton && REG_P (original_target)
8034 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8035 && original_target == var_rtx (singleton)))
8036 && GET_MODE (original_target) == mode
8037 #ifdef HAVE_conditional_move
8038 && (! can_conditionally_move_p (mode)
8039 || REG_P (original_target)
8040 || TREE_ADDRESSABLE (type))
8042 && (!MEM_P (original_target)
8043 || TREE_ADDRESSABLE (type)))
8044 temp = original_target;
8045 else if (TREE_ADDRESSABLE (type))
8048 temp = assign_temp (type, 0, 0, 1);
8050 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8051 do the test of X as a store-flag operation, do this as
8052 A + ((X != 0) << log C). Similarly for other simple binary
8053 operators. Only do for C == 1 if BRANCH_COST is low. */
8054 if (temp && singleton && binary_op
8055 && (TREE_CODE (binary_op) == PLUS_EXPR
8056 || TREE_CODE (binary_op) == MINUS_EXPR
8057 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8058 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8059 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8060 : integer_onep (TREE_OPERAND (binary_op, 1)))
8061 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8065 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8066 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8067 ? addv_optab : add_optab)
8068 : TREE_CODE (binary_op) == MINUS_EXPR
8069 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8070 ? subv_optab : sub_optab)
8071 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8074 /* If we had X ? A : A + 1, do this as A + (X == 0). */
8075 if (singleton == TREE_OPERAND (exp, 1))
8076 cond = invert_truthvalue (TREE_OPERAND (exp, 0));
8078 cond = TREE_OPERAND (exp, 0);
8080 result = do_store_flag (cond, (safe_from_p (temp, singleton, 1)
8082 mode, BRANCH_COST <= 1);
8084 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8085 result = expand_shift (LSHIFT_EXPR, mode, result,
8086 build_int_2 (tree_log2
8090 (safe_from_p (temp, singleton, 1)
8091 ? temp : NULL_RTX), 0);
8095 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8096 return expand_binop (mode, boptab, op1, result, temp,
8097 unsignedp, OPTAB_LIB_WIDEN);
8101 do_pending_stack_adjust ();
8103 op0 = gen_label_rtx ();
8105 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8109 /* If the target conflicts with the other operand of the
8110 binary op, we can't use it. Also, we can't use the target
8111 if it is a hard register, because evaluating the condition
8112 might clobber it. */
8114 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8116 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8117 temp = gen_reg_rtx (mode);
8118 store_expr (singleton, temp,
8119 modifier == EXPAND_STACK_PARM ? 2 : 0);
8122 expand_expr (singleton,
8123 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8124 if (singleton == TREE_OPERAND (exp, 1))
8125 jumpif (TREE_OPERAND (exp, 0), op0);
8127 jumpifnot (TREE_OPERAND (exp, 0), op0);
8129 if (binary_op && temp == 0)
8130 /* Just touch the other operand. */
8131 expand_expr (TREE_OPERAND (binary_op, 1),
8132 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8134 store_expr (build2 (TREE_CODE (binary_op), type,
8135 make_tree (type, temp),
8136 TREE_OPERAND (binary_op, 1)),
8137 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8139 store_expr (build1 (TREE_CODE (unary_op), type,
8140 make_tree (type, temp)),
8141 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8144 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8145 comparison operator. If we have one of these cases, set the
8146 output to A, branch on A (cse will merge these two references),
8147 then set the output to FOO. */
8149 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8150 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8151 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8152 TREE_OPERAND (exp, 1), 0)
8153 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8154 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8155 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8158 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8159 temp = gen_reg_rtx (mode);
8160 store_expr (TREE_OPERAND (exp, 1), temp,
8161 modifier == EXPAND_STACK_PARM ? 2 : 0);
8162 jumpif (TREE_OPERAND (exp, 0), op0);
8164 if (TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8165 store_expr (TREE_OPERAND (exp, 2), temp,
8166 modifier == EXPAND_STACK_PARM ? 2 : 0);
8168 expand_expr (TREE_OPERAND (exp, 2),
8169 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8173 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8174 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8175 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8176 TREE_OPERAND (exp, 2), 0)
8177 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8178 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8179 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8182 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8183 temp = gen_reg_rtx (mode);
8184 store_expr (TREE_OPERAND (exp, 2), temp,
8185 modifier == EXPAND_STACK_PARM ? 2 : 0);
8186 jumpifnot (TREE_OPERAND (exp, 0), op0);
8188 if (TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8189 store_expr (TREE_OPERAND (exp, 1), temp,
8190 modifier == EXPAND_STACK_PARM ? 2 : 0);
8192 expand_expr (TREE_OPERAND (exp, 1),
8193 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8198 op1 = gen_label_rtx ();
8199 jumpifnot (TREE_OPERAND (exp, 0), op0);
8201 /* One branch of the cond can be void, if it never returns. For
8202 example A ? throw : E */
8204 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8205 store_expr (TREE_OPERAND (exp, 1), temp,
8206 modifier == EXPAND_STACK_PARM ? 2 : 0);
8208 expand_expr (TREE_OPERAND (exp, 1),
8209 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8210 emit_jump_insn (gen_jump (op1));
8214 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8215 store_expr (TREE_OPERAND (exp, 2), temp,
8216 modifier == EXPAND_STACK_PARM ? 2 : 0);
8218 expand_expr (TREE_OPERAND (exp, 2),
8219 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8230 tree lhs = TREE_OPERAND (exp, 0);
8231 tree rhs = TREE_OPERAND (exp, 1);
8233 temp = expand_assignment (lhs, rhs, ! ignore);
8239 /* If lhs is complex, expand calls in rhs before computing it.
8240 That's so we don't compute a pointer and save it over a
8241 call. If lhs is simple, compute it first so we can give it
8242 as a target if the rhs is just a call. This avoids an
8243 extra temp and copy and that prevents a partial-subsumption
8244 which makes bad code. Actually we could treat
8245 component_ref's of vars like vars. */
8247 tree lhs = TREE_OPERAND (exp, 0);
8248 tree rhs = TREE_OPERAND (exp, 1);
8252 /* Check for |= or &= of a bitfield of size one into another bitfield
8253 of size 1. In this case, (unless we need the result of the
8254 assignment) we can do this more efficiently with a
8255 test followed by an assignment, if necessary.
8257 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8258 things change so we do, this code should be enhanced to
8261 && TREE_CODE (lhs) == COMPONENT_REF
8262 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8263 || TREE_CODE (rhs) == BIT_AND_EXPR)
8264 && TREE_OPERAND (rhs, 0) == lhs
8265 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8266 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8267 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8269 rtx label = gen_label_rtx ();
8271 do_jump (TREE_OPERAND (rhs, 1),
8272 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8273 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8274 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8275 (TREE_CODE (rhs) == BIT_IOR_EXPR
8277 : integer_zero_node)),
8279 do_pending_stack_adjust ();
8284 temp = expand_assignment (lhs, rhs, ! ignore);
8290 if (!TREE_OPERAND (exp, 0))
8291 expand_null_return ();
8293 expand_return (TREE_OPERAND (exp, 0));
8297 if (modifier == EXPAND_STACK_PARM)
8299 /* If we are taking the address of something erroneous, just
8301 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8303 /* If we are taking the address of a constant and are at the
8304 top level, we have to use output_constant_def since we can't
8305 call force_const_mem at top level. */
8307 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8308 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
8310 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
8313 /* We make sure to pass const0_rtx down if we came in with
8314 ignore set, to avoid doing the cleanups twice for something. */
8315 op0 = expand_expr (TREE_OPERAND (exp, 0),
8316 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8317 (modifier == EXPAND_INITIALIZER
8318 ? modifier : EXPAND_CONST_ADDRESS));
8320 /* If we are going to ignore the result, OP0 will have been set
8321 to const0_rtx, so just return it. Don't get confused and
8322 think we are taking the address of the constant. */
8326 /* We would like the object in memory. If it is a constant, we can
8327 have it be statically allocated into memory. For a non-constant,
8328 we need to allocate some memory and store the value into it. */
8330 if (CONSTANT_P (op0))
8331 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8333 else if (REG_P (op0) || GET_CODE (op0) == SUBREG
8334 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == PARALLEL
8335 || GET_CODE (op0) == LO_SUM)
8337 /* If this object is in a register, it can't be BLKmode. */
8338 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8339 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8341 if (GET_CODE (op0) == PARALLEL)
8342 /* Handle calls that pass values in multiple
8343 non-contiguous locations. The Irix 6 ABI has examples
8345 emit_group_store (memloc, op0, inner_type,
8346 int_size_in_bytes (inner_type));
8348 emit_move_insn (memloc, op0);
8356 mark_temp_addr_taken (op0);
8357 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8359 op0 = XEXP (op0, 0);
8360 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
8361 op0 = convert_memory_address (ptr_mode, op0);
8365 /* If OP0 is not aligned as least as much as the type requires, we
8366 need to make a temporary, copy OP0 to it, and take the address of
8367 the temporary. We want to use the alignment of the type, not of
8368 the operand. Note that this is incorrect for FUNCTION_TYPE, but
8369 the test for BLKmode means that can't happen. The test for
8370 BLKmode is because we never make mis-aligned MEMs with
8373 We don't need to do this at all if the machine doesn't have
8374 strict alignment. */
8375 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
8376 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
8378 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
8380 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8383 if (TYPE_ALIGN_OK (inner_type))
8386 if (TREE_ADDRESSABLE (inner_type))
8388 /* We can't make a bitwise copy of this object, so fail. */
8389 error ("cannot take the address of an unaligned member");
8393 new = assign_stack_temp_for_type
8394 (TYPE_MODE (inner_type),
8395 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
8396 : int_size_in_bytes (inner_type),
8397 1, build_qualified_type (inner_type,
8398 (TYPE_QUALS (inner_type)
8399 | TYPE_QUAL_CONST)));
8401 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
8402 (modifier == EXPAND_STACK_PARM
8403 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
8408 op0 = force_operand (XEXP (op0, 0), target);
8413 && modifier != EXPAND_CONST_ADDRESS
8414 && modifier != EXPAND_INITIALIZER
8415 && modifier != EXPAND_SUM)
8416 op0 = force_reg (Pmode, op0);
8419 && ! REG_USERVAR_P (op0))
8420 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8422 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
8423 op0 = convert_memory_address (ptr_mode, op0);
8427 case ENTRY_VALUE_EXPR:
8430 /* COMPLEX type for Extended Pascal & Fortran */
8433 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8436 /* Get the rtx code of the operands. */
8437 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8438 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8441 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8445 /* Move the real (op0) and imaginary (op1) parts to their location. */
8446 emit_move_insn (gen_realpart (mode, target), op0);
8447 emit_move_insn (gen_imagpart (mode, target), op1);
8449 insns = get_insns ();
8452 /* Complex construction should appear as a single unit. */
8453 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8454 each with a separate pseudo as destination.
8455 It's not correct for flow to treat them as a unit. */
8456 if (GET_CODE (target) != CONCAT)
8457 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8465 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8466 return gen_realpart (mode, op0);
8469 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8470 return gen_imagpart (mode, op0);
8474 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8478 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8481 target = gen_reg_rtx (mode);
8485 /* Store the realpart and the negated imagpart to target. */
8486 emit_move_insn (gen_realpart (partmode, target),
8487 gen_realpart (partmode, op0));
8489 imag_t = gen_imagpart (partmode, target);
8490 temp = expand_unop (partmode,
8491 ! unsignedp && flag_trapv
8492 && (GET_MODE_CLASS(partmode) == MODE_INT)
8493 ? negv_optab : neg_optab,
8494 gen_imagpart (partmode, op0), imag_t, 0);
8496 emit_move_insn (imag_t, temp);
8498 insns = get_insns ();
8501 /* Conjugate should appear as a single unit
8502 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8503 each with a separate pseudo as destination.
8504 It's not correct for flow to treat them as a unit. */
8505 if (GET_CODE (target) != CONCAT)
8506 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8514 expand_resx_expr (exp);
8517 case TRY_CATCH_EXPR:
8519 case EH_FILTER_EXPR:
8520 case TRY_FINALLY_EXPR:
8521 /* Lowered by tree-eh.c. */
8524 case WITH_CLEANUP_EXPR:
8525 case CLEANUP_POINT_EXPR:
8527 case CASE_LABEL_EXPR:
8530 /* Lowered by gimplify.c. */
8534 return get_exception_pointer (cfun);
8537 return get_exception_filter (cfun);
8539 case PREINCREMENT_EXPR:
8540 case PREDECREMENT_EXPR:
8541 case POSTINCREMENT_EXPR:
8542 case POSTDECREMENT_EXPR:
8544 /* Function descriptors are not valid except for as
8545 initialization constants, and should not be expanded. */
8549 expand_start_case (SWITCH_COND (exp));
8550 /* The switch body is lowered in gimplify.c, we should never have
8551 switches with a non-NULL SWITCH_BODY here. */
8552 if (SWITCH_BODY (exp))
8554 if (SWITCH_LABELS (exp))
8556 tree vec = SWITCH_LABELS (exp);
8557 size_t i = TREE_VEC_LENGTH (vec);
8561 tree elt = TREE_VEC_ELT (vec, --i);
8562 add_case_node (CASE_LOW (elt), CASE_HIGH (elt),
8569 expand_end_case_type (SWITCH_COND (exp), TREE_TYPE (exp));
8573 expand_label (TREE_OPERAND (exp, 0));
8577 expand_asm_expr (exp);
8580 case WITH_SIZE_EXPR:
8581 /* WITH_SIZE_EXPR expands to its first argument. The caller should
8582 have pulled out the size to use in whatever context it needed. */
8583 return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode,
8587 return lang_hooks.expand_expr (exp, original_target, tmode,
8591 /* Here to do an ordinary binary operator, generating an instruction
8592 from the optab already placed in `this_optab'. */
8594 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8595 subtarget, &op0, &op1, 0);
8597 if (modifier == EXPAND_STACK_PARM)
8599 temp = expand_binop (mode, this_optab, op0, op1, target,
8600 unsignedp, OPTAB_LIB_WIDEN);
8603 return REDUCE_BIT_FIELD (temp);
8605 #undef REDUCE_BIT_FIELD
8607 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
8608 signedness of TYPE), possibly returning the result in TARGET. */
8610 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
8612 HOST_WIDE_INT prec = TYPE_PRECISION (type);
8613 if (target && GET_MODE (target) != GET_MODE (exp))
8615 if (TYPE_UNSIGNED (type))
8618 if (prec < HOST_BITS_PER_WIDE_INT)
8619 mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
8622 mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
8623 ((unsigned HOST_WIDE_INT) 1
8624 << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
8626 return expand_and (GET_MODE (exp), exp, mask, target);
8630 tree count = build_int_2 (GET_MODE_BITSIZE (GET_MODE (exp)) - prec, 0);
8631 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
8632 return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
8636 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
8637 when applied to the address of EXP produces an address known to be
8638 aligned more than BIGGEST_ALIGNMENT. */
8641 is_aligning_offset (tree offset, tree exp)
8643 /* Strip off any conversions. */
8644 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8645 || TREE_CODE (offset) == NOP_EXPR
8646 || TREE_CODE (offset) == CONVERT_EXPR)
8647 offset = TREE_OPERAND (offset, 0);
8649 /* We must now have a BIT_AND_EXPR with a constant that is one less than
8650 power of 2 and which is larger than BIGGEST_ALIGNMENT. */
8651 if (TREE_CODE (offset) != BIT_AND_EXPR
8652 || !host_integerp (TREE_OPERAND (offset, 1), 1)
8653 || compare_tree_int (TREE_OPERAND (offset, 1),
8654 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
8655 || !exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
8658 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
8659 It must be NEGATE_EXPR. Then strip any more conversions. */
8660 offset = TREE_OPERAND (offset, 0);
8661 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8662 || TREE_CODE (offset) == NOP_EXPR
8663 || TREE_CODE (offset) == CONVERT_EXPR)
8664 offset = TREE_OPERAND (offset, 0);
8666 if (TREE_CODE (offset) != NEGATE_EXPR)
8669 offset = TREE_OPERAND (offset, 0);
8670 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8671 || TREE_CODE (offset) == NOP_EXPR
8672 || TREE_CODE (offset) == CONVERT_EXPR)
8673 offset = TREE_OPERAND (offset, 0);
8675 /* This must now be the address of EXP. */
8676 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
8679 /* Return the tree node if an ARG corresponds to a string constant or zero
8680 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
8681 in bytes within the string that ARG is accessing. The type of the
8682 offset will be `sizetype'. */
8685 string_constant (tree arg, tree *ptr_offset)
8689 if (TREE_CODE (arg) == ADDR_EXPR
8690 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8692 *ptr_offset = size_zero_node;
8693 return TREE_OPERAND (arg, 0);
8695 if (TREE_CODE (arg) == ADDR_EXPR
8696 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
8697 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg, 0), 0)) == STRING_CST)
8699 *ptr_offset = convert (sizetype, TREE_OPERAND (TREE_OPERAND (arg, 0), 1));
8700 return TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
8702 else if (TREE_CODE (arg) == PLUS_EXPR)
8704 tree arg0 = TREE_OPERAND (arg, 0);
8705 tree arg1 = TREE_OPERAND (arg, 1);
8710 if (TREE_CODE (arg0) == ADDR_EXPR
8711 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
8713 *ptr_offset = convert (sizetype, arg1);
8714 return TREE_OPERAND (arg0, 0);
8716 else if (TREE_CODE (arg1) == ADDR_EXPR
8717 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
8719 *ptr_offset = convert (sizetype, arg0);
8720 return TREE_OPERAND (arg1, 0);
8727 /* Generate code to calculate EXP using a store-flag instruction
8728 and return an rtx for the result. EXP is either a comparison
8729 or a TRUTH_NOT_EXPR whose operand is a comparison.
8731 If TARGET is nonzero, store the result there if convenient.
8733 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
8736 Return zero if there is no suitable set-flag instruction
8737 available on this machine.
8739 Once expand_expr has been called on the arguments of the comparison,
8740 we are committed to doing the store flag, since it is not safe to
8741 re-evaluate the expression. We emit the store-flag insn by calling
8742 emit_store_flag, but only expand the arguments if we have a reason
8743 to believe that emit_store_flag will be successful. If we think that
8744 it will, but it isn't, we have to simulate the store-flag with a
8745 set/jump/set sequence. */
8748 do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
8751 tree arg0, arg1, type;
8753 enum machine_mode operand_mode;
8757 enum insn_code icode;
8758 rtx subtarget = target;
8761 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
8762 result at the end. We can't simply invert the test since it would
8763 have already been inverted if it were valid. This case occurs for
8764 some floating-point comparisons. */
8766 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
8767 invert = 1, exp = TREE_OPERAND (exp, 0);
8769 arg0 = TREE_OPERAND (exp, 0);
8770 arg1 = TREE_OPERAND (exp, 1);
8772 /* Don't crash if the comparison was erroneous. */
8773 if (arg0 == error_mark_node || arg1 == error_mark_node)
8776 type = TREE_TYPE (arg0);
8777 operand_mode = TYPE_MODE (type);
8778 unsignedp = TYPE_UNSIGNED (type);
8780 /* We won't bother with BLKmode store-flag operations because it would mean
8781 passing a lot of information to emit_store_flag. */
8782 if (operand_mode == BLKmode)
8785 /* We won't bother with store-flag operations involving function pointers
8786 when function pointers must be canonicalized before comparisons. */
8787 #ifdef HAVE_canonicalize_funcptr_for_compare
8788 if (HAVE_canonicalize_funcptr_for_compare
8789 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
8790 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8792 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
8793 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8794 == FUNCTION_TYPE))))
8801 /* Get the rtx comparison code to use. We know that EXP is a comparison
8802 operation of some type. Some comparisons against 1 and -1 can be
8803 converted to comparisons with zero. Do so here so that the tests
8804 below will be aware that we have a comparison with zero. These
8805 tests will not catch constants in the first operand, but constants
8806 are rarely passed as the first operand. */
8808 switch (TREE_CODE (exp))
8817 if (integer_onep (arg1))
8818 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
8820 code = unsignedp ? LTU : LT;
8823 if (! unsignedp && integer_all_onesp (arg1))
8824 arg1 = integer_zero_node, code = LT;
8826 code = unsignedp ? LEU : LE;
8829 if (! unsignedp && integer_all_onesp (arg1))
8830 arg1 = integer_zero_node, code = GE;
8832 code = unsignedp ? GTU : GT;
8835 if (integer_onep (arg1))
8836 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
8838 code = unsignedp ? GEU : GE;
8841 case UNORDERED_EXPR:
8870 /* Put a constant second. */
8871 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
8873 tem = arg0; arg0 = arg1; arg1 = tem;
8874 code = swap_condition (code);
8877 /* If this is an equality or inequality test of a single bit, we can
8878 do this by shifting the bit being tested to the low-order bit and
8879 masking the result with the constant 1. If the condition was EQ,
8880 we xor it with 1. This does not require an scc insn and is faster
8881 than an scc insn even if we have it.
8883 The code to make this transformation was moved into fold_single_bit_test,
8884 so we just call into the folder and expand its result. */
8886 if ((code == NE || code == EQ)
8887 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
8888 && integer_pow2p (TREE_OPERAND (arg0, 1)))
8890 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
8891 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
8893 target, VOIDmode, EXPAND_NORMAL);
8896 /* Now see if we are likely to be able to do this. Return if not. */
8897 if (! can_compare_p (code, operand_mode, ccp_store_flag))
8900 icode = setcc_gen_code[(int) code];
8901 if (icode == CODE_FOR_nothing
8902 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
8904 /* We can only do this if it is one of the special cases that
8905 can be handled without an scc insn. */
8906 if ((code == LT && integer_zerop (arg1))
8907 || (! only_cheap && code == GE && integer_zerop (arg1)))
8909 else if (BRANCH_COST >= 0
8910 && ! only_cheap && (code == NE || code == EQ)
8911 && TREE_CODE (type) != REAL_TYPE
8912 && ((abs_optab->handlers[(int) operand_mode].insn_code
8913 != CODE_FOR_nothing)
8914 || (ffs_optab->handlers[(int) operand_mode].insn_code
8915 != CODE_FOR_nothing)))
8921 if (! get_subtarget (target)
8922 || GET_MODE (subtarget) != operand_mode)
8925 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
8928 target = gen_reg_rtx (mode);
8930 result = emit_store_flag (target, code, op0, op1,
8931 operand_mode, unsignedp, 1);
8936 result = expand_binop (mode, xor_optab, result, const1_rtx,
8937 result, 0, OPTAB_LIB_WIDEN);
8941 /* If this failed, we have to do this with set/compare/jump/set code. */
8943 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
8944 target = gen_reg_rtx (GET_MODE (target));
8946 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
8947 result = compare_from_rtx (op0, op1, code, unsignedp,
8948 operand_mode, NULL_RTX);
8949 if (GET_CODE (result) == CONST_INT)
8950 return (((result == const0_rtx && ! invert)
8951 || (result != const0_rtx && invert))
8952 ? const0_rtx : const1_rtx);
8954 /* The code of RESULT may not match CODE if compare_from_rtx
8955 decided to swap its operands and reverse the original code.
8957 We know that compare_from_rtx returns either a CONST_INT or
8958 a new comparison code, so it is safe to just extract the
8959 code from RESULT. */
8960 code = GET_CODE (result);
8962 label = gen_label_rtx ();
8963 if (bcc_gen_fctn[(int) code] == 0)
8966 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
8967 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
8974 /* Stubs in case we haven't got a casesi insn. */
8976 # define HAVE_casesi 0
8977 # define gen_casesi(a, b, c, d, e) (0)
8978 # define CODE_FOR_casesi CODE_FOR_nothing
8981 /* If the machine does not have a case insn that compares the bounds,
8982 this means extra overhead for dispatch tables, which raises the
8983 threshold for using them. */
8984 #ifndef CASE_VALUES_THRESHOLD
8985 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
8986 #endif /* CASE_VALUES_THRESHOLD */
8989 case_values_threshold (void)
8991 return CASE_VALUES_THRESHOLD;
8994 /* Attempt to generate a casesi instruction. Returns 1 if successful,
8995 0 otherwise (i.e. if there is no casesi instruction). */
8997 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
8998 rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
9000 enum machine_mode index_mode = SImode;
9001 int index_bits = GET_MODE_BITSIZE (index_mode);
9002 rtx op1, op2, index;
9003 enum machine_mode op_mode;
9008 /* Convert the index to SImode. */
9009 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
9011 enum machine_mode omode = TYPE_MODE (index_type);
9012 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
9014 /* We must handle the endpoints in the original mode. */
9015 index_expr = build2 (MINUS_EXPR, index_type,
9016 index_expr, minval);
9017 minval = integer_zero_node;
9018 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9019 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
9020 omode, 1, default_label);
9021 /* Now we can safely truncate. */
9022 index = convert_to_mode (index_mode, index, 0);
9026 if (TYPE_MODE (index_type) != index_mode)
9028 index_expr = convert (lang_hooks.types.type_for_size
9029 (index_bits, 0), index_expr);
9030 index_type = TREE_TYPE (index_expr);
9033 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9036 do_pending_stack_adjust ();
9038 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
9039 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
9041 index = copy_to_mode_reg (op_mode, index);
9043 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
9045 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
9046 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
9047 op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
9048 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
9050 op1 = copy_to_mode_reg (op_mode, op1);
9052 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
9054 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
9055 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
9056 op2, TYPE_UNSIGNED (TREE_TYPE (range)));
9057 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
9059 op2 = copy_to_mode_reg (op_mode, op2);
9061 emit_jump_insn (gen_casesi (index, op1, op2,
9062 table_label, default_label));
9066 /* Attempt to generate a tablejump instruction; same concept. */
9067 #ifndef HAVE_tablejump
9068 #define HAVE_tablejump 0
9069 #define gen_tablejump(x, y) (0)
9072 /* Subroutine of the next function.
9074 INDEX is the value being switched on, with the lowest value
9075 in the table already subtracted.
9076 MODE is its expected mode (needed if INDEX is constant).
9077 RANGE is the length of the jump table.
9078 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9080 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9081 index value is out of range. */
9084 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
9089 if (INTVAL (range) > cfun->max_jumptable_ents)
9090 cfun->max_jumptable_ents = INTVAL (range);
9092 /* Do an unsigned comparison (in the proper mode) between the index
9093 expression and the value which represents the length of the range.
9094 Since we just finished subtracting the lower bound of the range
9095 from the index expression, this comparison allows us to simultaneously
9096 check that the original index expression value is both greater than
9097 or equal to the minimum value of the range and less than or equal to
9098 the maximum value of the range. */
9100 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
9103 /* If index is in range, it must fit in Pmode.
9104 Convert to Pmode so we can index with it. */
9106 index = convert_to_mode (Pmode, index, 1);
9108 /* Don't let a MEM slip through, because then INDEX that comes
9109 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9110 and break_out_memory_refs will go to work on it and mess it up. */
9111 #ifdef PIC_CASE_VECTOR_ADDRESS
9112 if (flag_pic && !REG_P (index))
9113 index = copy_to_mode_reg (Pmode, index);
9116 /* If flag_force_addr were to affect this address
9117 it could interfere with the tricky assumptions made
9118 about addresses that contain label-refs,
9119 which may be valid only very near the tablejump itself. */
9120 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9121 GET_MODE_SIZE, because this indicates how large insns are. The other
9122 uses should all be Pmode, because they are addresses. This code
9123 could fail if addresses and insns are not the same size. */
9124 index = gen_rtx_PLUS (Pmode,
9125 gen_rtx_MULT (Pmode, index,
9126 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9127 gen_rtx_LABEL_REF (Pmode, table_label));
9128 #ifdef PIC_CASE_VECTOR_ADDRESS
9130 index = PIC_CASE_VECTOR_ADDRESS (index);
9133 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9134 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9135 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
9136 RTX_UNCHANGING_P (vector) = 1;
9137 MEM_NOTRAP_P (vector) = 1;
9138 convert_move (temp, vector, 0);
9140 emit_jump_insn (gen_tablejump (temp, table_label));
9142 /* If we are generating PIC code or if the table is PC-relative, the
9143 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9144 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
9149 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
9150 rtx table_label, rtx default_label)
9154 if (! HAVE_tablejump)
9157 index_expr = fold (build2 (MINUS_EXPR, index_type,
9158 convert (index_type, index_expr),
9159 convert (index_type, minval)));
9160 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9161 do_pending_stack_adjust ();
9163 do_tablejump (index, TYPE_MODE (index_type),
9164 convert_modes (TYPE_MODE (index_type),
9165 TYPE_MODE (TREE_TYPE (range)),
9166 expand_expr (range, NULL_RTX,
9168 TYPE_UNSIGNED (TREE_TYPE (range))),
9169 table_label, default_label);
9173 /* Nonzero if the mode is a valid vector mode for this architecture.
9174 This returns nonzero even if there is no hardware support for the
9175 vector mode, but we can emulate with narrower modes. */
9178 vector_mode_valid_p (enum machine_mode mode)
9180 enum mode_class class = GET_MODE_CLASS (mode);
9181 enum machine_mode innermode;
9183 /* Doh! What's going on? */
9184 if (class != MODE_VECTOR_INT
9185 && class != MODE_VECTOR_FLOAT)
9188 /* Hardware support. Woo hoo! */
9189 if (VECTOR_MODE_SUPPORTED_P (mode))
9192 innermode = GET_MODE_INNER (mode);
9194 /* We should probably return 1 if requesting V4DI and we have no DI,
9195 but we have V2DI, but this is probably very unlikely. */
9197 /* If we have support for the inner mode, we can safely emulate it.
9198 We may not have V2DI, but me can emulate with a pair of DIs. */
9199 return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
9202 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
9204 const_vector_from_tree (tree exp)
9209 enum machine_mode inner, mode;
9211 mode = TYPE_MODE (TREE_TYPE (exp));
9213 if (initializer_zerop (exp))
9214 return CONST0_RTX (mode);
9216 units = GET_MODE_NUNITS (mode);
9217 inner = GET_MODE_INNER (mode);
9219 v = rtvec_alloc (units);
9221 link = TREE_VECTOR_CST_ELTS (exp);
9222 for (i = 0; link; link = TREE_CHAIN (link), ++i)
9224 elt = TREE_VALUE (link);
9226 if (TREE_CODE (elt) == REAL_CST)
9227 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
9230 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
9231 TREE_INT_CST_HIGH (elt),
9235 /* Initialize remaining elements to 0. */
9236 for (; i < units; ++i)
9237 RTVEC_ELT (v, i) = CONST0_RTX (inner);
9239 return gen_rtx_raw_CONST_VECTOR (mode, v);
9241 #include "gt-expr.h"