/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "typeclass.h"
#include "langhooks.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "tree-flow.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};
/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
  void *constfundata;
  int reverse;
};
static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
                                                     unsigned int,
                                                     unsigned int);
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
                              struct move_by_pieces *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned);
static rtx emit_block_move_via_libcall (rtx, rtx, rtx);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
                               struct store_by_pieces *);
static bool clear_storage_via_clrmem (rtx, rtx, unsigned);
static rtx clear_storage_via_libcall (rtx, rtx);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
                                     HOST_WIDE_INT, enum machine_mode,
                                     tree, tree, int, int);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
                        tree, tree, int);
static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);
static int is_aligning_offset (tree, tree);
static void expand_operands (tree, tree, rtx, rtx*, rtx*,
                             enum expand_modifier);
static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
static rtx do_store_flag (tree, rtx, enum machine_mode, int);
#ifdef PUSH_ROUNDING
static void emit_single_push_insn (enum machine_mode, rtx, tree);
#endif
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
static rtx const_vector_from_tree (tree);
static void write_complex_part (rtx, rtx, bool);
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO)
#endif

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) CLEAR_RATIO)
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero, or
   to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO)
#endif
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movmem_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrmem_optab[NUM_MACHINE_MODES];

/* These arrays record the insn_code of two different kinds of insns
   to perform block compares.  */
enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
enum insn_code cmpmem_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once (void)
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);
      /* See if there is some register that can be used in this mode and
         directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
        for (regno = 0; regno < FIRST_PSEUDO_REGISTER
             && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
             regno++)
          {
            if (! HARD_REGNO_MODE_OK (regno, mode))
              continue;

            REGNO (reg) = regno;

            SET_SRC (pat) = mem;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = mem1;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem1;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;
          }
    }
  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
           srcmode = GET_MODE_WIDER_MODE (srcmode))
        {
          enum insn_code ic;

          ic = can_extend_p (mode, srcmode, 0);
          if (ic == CODE_FOR_nothing)
            continue;

          PUT_MODE (mem, srcmode);

          if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
            float_extend_from_mem[mode][srcmode] = true;
        }
    }
}
/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (rtx to, rtx from, int unsignedp)
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
                              : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));

  gcc_assert (to_real == from_real);
  /* If the source and destination are already the same, then there's
     nothing to do.  */
  if (to == from)
    return;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
          >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }
  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));

      if (VECTOR_MODE_P (to_mode))
        from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
        to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }
  if (to_real)
    {
      rtx value, insns;
      convert_optab tab;

      gcc_assert (GET_MODE_PRECISION (from_mode)
                  != GET_MODE_PRECISION (to_mode));

      if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
        tab = sext_optab;
      else
        tab = trunc_optab;

      /* Try converting directly if the insn is supported.  */

      code = tab->handlers[to_mode][from_mode].insn_code;
      if (code != CODE_FOR_nothing)
        {
          emit_unop_insn (code, to, from,
                          tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
          return;
        }

      /* Otherwise use a libcall.  */
      libcall = tab->handlers[to_mode][from_mode].libfunc;

      /* Is this conversion implemented yet?  */
      gcc_assert (libcall);

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
                                       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value,
                          tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
                                                                       from)
                          : gen_rtx_FLOAT_EXTEND (to_mode, from));
      return;
    }
  /* Handle pointer conversion.  */                     /* SPEE 900220.  */
  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
        = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);

      gcc_assert (trunc_optab->handlers[to_mode][full_mode].insn_code
                  != CODE_FOR_nothing);

      if (full_mode != from_mode)
        from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
                      to, from, UNKNOWN);
      return;
    }

  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
        = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);

      gcc_assert (sext_optab->handlers[full_mode][from_mode].insn_code
                  != CODE_FOR_nothing);

      emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
                      to, from, UNKNOWN);
      if (to_mode == full_mode)
        return;

      /* else proceed to integer conversions below.  */
      from_mode = full_mode;
    }
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          /* If FROM is a SUBREG, put it into a register.  Do this
             so that we always generate the same set of insns for
             better cse'ing; if an intermediate assignment occurred,
             we won't be doing the operation directly on the SUBREG.  */
          if (optimize > 0 && GET_CODE (from) == SUBREG)
            from = force_reg (from_mode, from);
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
               && ((code = can_extend_p (to_mode, word_mode, unsignedp))
                   != CODE_FOR_nothing))
        {
          if (REG_P (to))
            {
              if (reg_overlap_mentioned_p (to, from))
                from = force_reg (from_mode, from);
              emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
            }
          convert_move (gen_lowpart (word_mode, to), from, unsignedp);
          emit_unop_insn (code, to,
                          gen_lowpart (word_mode, to), equiv_code);
          return;
        }
      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
         that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
        from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
        lowpart_mode = word_mode;
      else
        lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
        fill_value = const0_rtx;
      else
        {
#ifdef HAVE_slt
          if (HAVE_slt
              && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
              && STORE_FLAG_VALUE == -1)
            {
              emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
                             lowpart_mode, 0);
              fill_value = gen_reg_rtx (word_mode);
              emit_insn (gen_slt (fill_value));
            }
          else
#endif
            {
              fill_value
                = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
                                size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
                                NULL_RTX, 0);
              fill_value = convert_to_mode (word_mode, fill_value, 1);
            }
        }

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
        {
          int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
          rtx subword = operand_subword (to, index, 1, to_mode);

          gcc_assert (subword);

          if (fill_value != subword)
            emit_move_insn (subword, fill_value);
        }

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
                              gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((MEM_P (from)
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || REG_P (from)
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                GET_MODE_BITSIZE (from_mode)))
    {
      if (!((MEM_P (from)
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || REG_P (from)
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
          && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
        from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }
  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          if (flag_force_mem)
            from = force_not_mem (from);

          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      else
        {
          enum machine_mode intermediate;
          rtx tmp;
          tree shift_amount;

          /* Search for a mode to convert via.  */
          for (intermediate = from_mode; intermediate != VOIDmode;
               intermediate = GET_MODE_WIDER_MODE (intermediate))
            if (((can_extend_p (to_mode, intermediate, unsignedp)
                  != CODE_FOR_nothing)
                 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
                     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                               GET_MODE_BITSIZE (intermediate))))
                && (can_extend_p (intermediate, from_mode, unsignedp)
                    != CODE_FOR_nothing))
              {
                convert_move (to, convert_to_mode (intermediate, from,
                                                   unsignedp), unsignedp);
                return;
              }

          /* No suitable intermediate mode.
             Generate what we need with shifts.  */
          shift_amount = build_int_cst (NULL_TREE,
                                        GET_MODE_BITSIZE (to_mode)
                                        - GET_MODE_BITSIZE (from_mode));
          from = gen_lowpart (to_mode, force_reg (from_mode, from));
          tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
                              to, unsignedp);
          tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
                              to, unsignedp);
          if (tmp != to)
            emit_move_insn (to, tmp);
          return;
        }
    }
  /* Support special truncate insns for certain modes.  */
  if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
    {
      emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
                      to, from, UNKNOWN);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  gcc_unreachable ();
}
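/* Illustration only (not part of the original source): a hedged
   sketch of calling convert_move directly.  The modes and the pseudo
   are hypothetical; convert_to_mode below is the variant that
   allocates the target itself.  */
#if 0
static rtx
example_zero_extend_si_to_di (rtx si_reg)
{
  /* Widen a SImode value to DImode with zero-extension: a nonzero
     UNSIGNEDP selects ZERO_EXTEND as the equivalent code.  */
  rtx di_reg = gen_reg_rtx (DImode);
  convert_move (di_reg, si_reg, 1);
  return di_reg;
}
#endif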
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.  */

rtx
convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */

rtx
convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
{
  rtx temp;
  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;
  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
          && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
        {
          int width = GET_MODE_BITSIZE (oldmode);

          /* We need to zero extend VAL.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
        }

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }
  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
          && GET_MODE_CLASS (oldmode) == MODE_INT
          && (GET_CODE (x) == CONST_DOUBLE
              || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
                  && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
                       && direct_load[(int) mode])
                      || (REG_P (x)
                          && (! HARD_REGISTER_P (x)
                              || HARD_REGNO_MODE_OK (REGNO (x), mode))
                          && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
                                                    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
         X does not need sign- or zero-extension.   This may not be
         the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
        {
          HOST_WIDE_INT val = INTVAL (x);
          int width = GET_MODE_BITSIZE (oldmode);

          /* We must sign or zero-extend in this case.  Start by
             zero-extending, then sign extend if we need to.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
          if (! unsignedp
              && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
            val |= (HOST_WIDE_INT) (-1) << width;

          return gen_int_mode (val, mode);
        }

      return gen_lowpart (mode, x);
    }
  /* Converting from integer constant into mode is always equivalent to a
     subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    {
      gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
      return simplify_gen_subreg (mode, x, oldmode, 0);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
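/* For instance (illustrative values, not from the original source):
   with 8-byte MOVE_MAX_PIECES and a 64-bit HOST_WIDE_INT, this is
   MIN (8, 2 * 8) = 8, so at most 8 bytes are stored per insn.  */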
/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

int
can_move_by_pieces (unsigned HOST_WIDE_INT len,
                    unsigned int align ATTRIBUTE_UNUSED)
{
  return MOVE_BY_PIECES_P (len, align);
}
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */

rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
                unsigned int align, int endp)
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
        = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
           || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
        = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align, max_size) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
          data.autinc_from = 1;
          data.explicit_inc_from = -1;
        }
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (from_addr);
          data.autinc_from = 1;
          data.explicit_inc_from = 1;
        }
      if (!data.autinc_from && CONSTANT_P (from_addr))
        data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
          data.autinc_to = 1;
          data.explicit_inc_to = -1;
        }
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (to_addr);
          data.autinc_to = 1;
          data.explicit_inc_to = 1;
        }
      if (!data.autinc_to && CONSTANT_P (to_addr))
        data.to_addr = copy_addr_to_reg (to_addr);
    }

  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
           tmode != VOIDmode;
           xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
            || SLOW_UNALIGNED_ACCESS (tmode, align))
          break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  gcc_assert (!data.len);

  if (endp)
    {
      rtx to1;

      gcc_assert (!data.reverse);
      if (data.autinc_to)
        {
          if (endp == 2)
            {
              if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
                emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
              else
                data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
                                                                -1));
            }
          to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
                                           data.offset);
        }
      else
        {
          if (endp == 2)
            --data.offset;
          to1 = adjust_address (data.to, QImode, data.offset);
        }
      return to1;
    }

  return data.to;
}
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
                       unsigned int max_size)
{
  unsigned HOST_WIDE_INT n_insns = 0;
  enum machine_mode tmode;

  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode tmode, xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
           tmode != VOIDmode;
           xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
            || SLOW_UNALIGNED_ACCESS (tmode, align))
          break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  gcc_assert (!l);
  return n_insns;
}
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
                  struct move_by_pieces *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
        data->offset -= size;

      if (data->to)
        {
          if (data->autinc_to)
            to1 = adjust_automodify_address (data->to, mode, data->to_addr,
                                             data->offset);
          else
            to1 = adjust_address (data->to, mode, data->offset);
        }

      if (data->autinc_from)
        from1 = adjust_automodify_address (data->from, mode, data->from_addr,
                                           data->offset);
      else
        from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
        emit_insn (gen_add2_insn (data->from_addr,
                                  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
        emit_insn ((*genfun) (to1, from1));
      else
        {
#ifdef PUSH_ROUNDING
          emit_single_push_insn (mode, from1, NULL);
#else
          gcc_unreachable ();
#endif
        }

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
        data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
         to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      gcc_unreachable ();
    }

  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  gcc_assert (MEM_P (x));
  gcc_assert (MEM_P (y));
  gcc_assert (size);

  /* Make sure we've got BLKmode addresses; store_one_arg can decide that
     block copy is more efficient for other large modes, e.g. DCmode.  */
  x = adjust_address (x, BLKmode, 0);
  y = adjust_address (y, BLKmode, 0);

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (GET_CODE (size) == CONST_INT)
    {
      if (INTVAL (size) == 0)
        return 0;

      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, size);
      set_mem_size (y, size);
    }

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movmem (x, y, size, align))
    ;
  else if (may_use_call)
    retval = emit_block_move_via_libcall (x, y, size);
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}
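/* Illustration only (not part of the original source): a hedged
   sketch of a typical caller.  DST and SRC are hypothetical BLKmode
   MEMs; any of the strategies above (by pieces, movmem, memcpy
   libcall, explicit loop) may be chosen for the 64-byte copy.  */
#if 0
static void
example_block_copy (rtx dst, rtx src)
{
  rtx ret = emit_block_move (dst, src, GEN_INT (64), BLOCK_OP_NORMAL);
  /* RET is the value returned by memcpy if a libcall was emitted,
     or 0 otherwise; callers must not assume it is set.  */
  (void) ret;
}
#endif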
/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
  /* If arguments are pushed on the stack, then they're safe.  */
  if (PUSH_ARGS)
    return true;

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
  {
    tree fn = emit_block_move_libcall_fn (false);
    (void) fn;
    if (REG_PARM_STACK_SPACE (fn) != 0)
      return false;
  }
#endif

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
  {
    CUMULATIVE_ARGS args_so_far;
    tree fn, arg;

    fn = emit_block_move_libcall_fn (false);
    INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);

    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
      {
        enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
        rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
        if (!tmp || !REG_P (tmp))
          return false;
        if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
                                        NULL_TREE, 1))
          return false;
        FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
      }
  }
  return true;
}
/* A subroutine of emit_block_move.  Expand a movmem pattern;
   return true if successful.  */

static bool
emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align)
{
  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  int save_volatile_ok = volatile_ok;
  enum machine_mode mode;

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = movmem_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
          /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
             here because if SIZE is less than the mode mask, as it is
             returned by the macro, it will definitely be less than the
             actual mode mask.  */
          && ((GET_CODE (size) == CONST_INT
               && ((unsigned HOST_WIDE_INT) INTVAL (size)
                   <= (GET_MODE_MASK (mode) >> 1)))
              || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
          && ((pred = insn_data[(int) code].operand[0].predicate) == 0
              || (*pred) (x, BLKmode))
          && ((pred = insn_data[(int) code].operand[1].predicate) == 0
              || (*pred) (y, BLKmode))
          && ((pred = insn_data[(int) code].operand[3].predicate) == 0
              || (*pred) (opalign, VOIDmode)))
        {
          rtx op2;
          rtx last = get_last_insn ();
          rtx pat;

          op2 = convert_to_mode (mode, size, 1);
          pred = insn_data[(int) code].operand[2].predicate;
          if (pred != 0 && ! (*pred) (op2, mode))
            op2 = copy_to_mode_reg (mode, op2);

          /* ??? When called via emit_block_move_for_call, it'd be
             nice if there were some way to inform the backend, so
             that it doesn't fail the expansion because it thinks
             emitting the libcall would be more efficient.  */

          pat = GEN_FCN ((int) code) (x, y, op2, opalign);
          if (pat)
            {
              emit_insn (pat);
              volatile_ok = save_volatile_ok;
              return true;
            }
          else
            delete_insns_since (last);
        }
    }

  volatile_ok = save_volatile_ok;
  return false;
}
/* A subroutine of emit_block_move.  Expand a call to memcpy.
   Return the return value from memcpy, 0 otherwise.  */

static rtx
emit_block_move_via_libcall (rtx dst, rtx src, rtx size)
{
  rtx dst_addr, src_addr;
  tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* Emit code to copy the addresses of DST and SRC and SIZE into new
     pseudos.  We can then place those new pseudos into a VAR_DECL and
     use them later.  */

  dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));

  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  size_mode = TYPE_MODE (sizetype);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  size_tree = make_tree (sizetype, size);

  fn = emit_block_move_libcall_fn (true);
  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
  arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
  arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);

  /* Now we have to build up the CALL_EXPR itself.  */
  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
  call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
                      call_expr, arg_list, NULL_TREE);

  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);

  return retval;
}
/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_move_fn;

void
init_block_move_fn (const char *asmspec)
{
  if (!block_move_fn)
    {
      tree args, fn;

      fn = get_identifier ("memcpy");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
                                       const_ptr_type_node, sizetype,
                                       NULL_TREE);

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;

      block_move_fn = fn;
    }

  if (asmspec)
    set_user_assembler_name (block_move_fn, asmspec);
}

static tree
emit_block_move_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_move_fn)
    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_move_fn);
      assemble_external (block_move_fn);
    }

  return block_move_fn;
}
/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

static void
emit_block_move_via_loop (rtx x, rtx y, rtx size,
                          unsigned int align ATTRIBUTE_UNUSED)
{
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (Pmode, iter_mode, iter, true);
  x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
  y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
                             true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
                           true, top_label);
}
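/* Illustration only (not part of the original source): the RTL
   emitted above corresponds to roughly this C loop, with the
   comparison at the bottom so the body is skipped when SIZE is 0:

     i = 0;
     goto cmp;
   top:
     x[i] = y[i];
     i++;
   cmp:
     if (i < size) goto top;  */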
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
                               GEN_INT (nregs));
      if (pat)
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
                    operand_subword_force (x, i, mode));
}

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (int regno, rtx x, int nregs)
{
  int i;

  if (nregs == 0)
    return;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      rtx last = get_last_insn ();
      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
                                    GEN_INT (nregs));
      if (pat)
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      gcc_assert (tem);

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

rtx
gen_group_rtx (rtx orig)
{
  int i, length;
  rtx *tmps;

  gcc_assert (GET_CODE (orig) == PARALLEL);

  length = XVECLEN (orig, 0);
  tmps = alloca (sizeof (rtx) * length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  if (i)
    tmps[0] = 0;

  for (; i < length; i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
    }

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}
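/* Illustration only (not part of the original source): a group for a
   16-byte value passed in two DImode registers might look like

     (parallel [(expr_list (reg:DI 3) (const_int 0))
                (expr_list (reg:DI 4) (const_int 8))])

   where each const_int is the byte offset of that piece within the
   whole value; gen_group_rtx above replaces the hard registers with
   fresh pseudos of the same modes.  The register numbers here are
   hypothetical.  */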
/* A subroutine of emit_group_load.  Arguments as for emit_group_load,
   except that values are placed in TMPS[i], and must later be moved
   into corresponding XEXP (XVECEXP (DST, 0, i), 0) element.  */

static void
emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
{
  rtx src;
  int start, i;
  enum machine_mode m = GET_MODE (orig_src);

  gcc_assert (GET_CODE (dst) == PARALLEL);

  if (m != VOIDmode
      && !SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_src)
      && GET_CODE (orig_src) != CONCAT)
    {
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
      if (imode == BLKmode)
        src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
      else
        src = gen_reg_rtx (imode);
      if (imode != BLKmode)
        src = gen_lowpart (GET_MODE (orig_src), src);
      emit_move_insn (src, orig_src);
      /* ...and back again.  */
      if (imode != BLKmode)
        src = gen_lowpart (imode, src);
      emit_group_load_1 (tmps, dst, src, type, ssize);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        {
          /* Arrange to shift the fragment to where it belongs.
             extract_bit_field loads to the lsb of the reg.  */
          if (
#ifdef BLOCK_REG_PADDING
              BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
              == (BYTES_BIG_ENDIAN ? upward : downward)
#else
              BYTES_BIG_ENDIAN
#endif
              )
            shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
          bytelen = ssize - bytepos;
          gcc_assert (bytelen > 0);
        }

      /* If we won't be loading directly from memory, protect the real source
         from strange tricks we might play; but make sure that the source can
         be loaded directly into the destination.  */
      src = orig_src;
      if (!MEM_P (orig_src)
          && (!CONSTANT_P (orig_src)
              || (GET_MODE (orig_src) != mode
                  && GET_MODE (orig_src) != VOIDmode)))
        {
          if (GET_MODE (orig_src) == VOIDmode)
            src = gen_reg_rtx (mode);
          else
            src = gen_reg_rtx (GET_MODE (orig_src));

          emit_move_insn (src, orig_src);
        }

      /* Optimize the access just a bit.  */
      if (MEM_P (src)
          && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
              || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
          && bytelen == GET_MODE_SIZE (mode))
        {
          tmps[i] = gen_reg_rtx (mode);
          emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
        }
      else if (COMPLEX_MODE_P (mode)
               && GET_MODE (src) == mode
               && bytelen == GET_MODE_SIZE (mode))
        /* Let emit_move_complex do the bulk of the work.  */
        tmps[i] = src;
      else if (GET_CODE (src) == CONCAT)
        {
          unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
          unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));

          if ((bytepos == 0 && bytelen == slen0)
              || (bytepos != 0 && bytepos + bytelen <= slen))
            {
              /* The following assumes that the concatenated objects all
                 have the same size.  In this case, a simple calculation
                 can be used to determine the object and the bit field
                 to be extracted.  */
              tmps[i] = XEXP (src, bytepos / slen0);
              if (! CONSTANT_P (tmps[i])
                  && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
                tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
                                             (bytepos % slen0) * BITS_PER_UNIT,
                                             1, NULL_RTX, mode, mode);
            }
          else
            {
              rtx mem;

              gcc_assert (!bytepos);
              mem = assign_stack_temp (GET_MODE (src), slen, 0);
              emit_move_insn (mem, src);
              tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
                                           0, 1, NULL_RTX, mode, mode);
            }
        }
      /* FIXME: A SIMD parallel will eventually lead to a subreg of a
         SIMD register, which is currently broken.  While we get GCC
         to emit proper RTL for these cases, let's dump to memory.  */
      else if (VECTOR_MODE_P (GET_MODE (dst))
               && REG_P (src))
        {
          int slen = GET_MODE_SIZE (GET_MODE (src));
          rtx mem;

          mem = assign_stack_temp (GET_MODE (src), slen, 0);
          emit_move_insn (mem, src);
          tmps[i] = adjust_address (mem, mode, (int) bytepos);
        }
      else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
               && XVECLEN (dst, 0) > 1)
        tmps[i] = simplify_gen_subreg (mode, src, GET_MODE(dst), bytepos);
      else if (CONSTANT_P (src)
               || (REG_P (src) && GET_MODE (src) == mode))
        tmps[i] = src;
      else
        tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
                                     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
                                     mode, mode);

      if (shift)
        tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
                                build_int_cst (NULL_TREE, shift), tmps[i], 0);
    }
}
/* Emit code to move a block SRC of type TYPE to a block DST,
   where DST is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_SRC in bytes, or -1
   if not known.  */

void
emit_group_load (rtx dst, rtx src, tree type, int ssize)
{
  rtx *tmps;
  int i;

  tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
  emit_group_load_1 (tmps, dst, src, type, ssize);

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = 0; i < XVECLEN (dst, 0); i++)
    {
      rtx d = XEXP (XVECEXP (dst, 0, i), 0);
      if (d == NULL)
        continue;
      emit_move_insn (d, tmps[i]);
    }
}
/* Similar, but load SRC into new pseudos in a format that looks like
   PARALLEL.  This can later be fed to emit_group_move to get things
   in the right place.  */

rtx
emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
{
  rtvec vec;
  int i;

  vec = rtvec_alloc (XVECLEN (parallel, 0));
  emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);

  /* Convert the vector to look just like the original PARALLEL, except
     with the computed values.  */
  for (i = 0; i < XVECLEN (parallel, 0); i++)
    {
      rtx e = XVECEXP (parallel, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
        {
          d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
          e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
        }
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
}
/* Emit code to move a block SRC to block DST, where SRC and DST are
   non-consecutive groups of registers, each represented by a PARALLEL.  */

void
emit_group_move (rtx dst, rtx src)
{
  int i;

  gcc_assert (GET_CODE (src) == PARALLEL
              && GET_CODE (dst) == PARALLEL
              && XVECLEN (src, 0) == XVECLEN (dst, 0));

  /* Skip first entry if NULL.  */
  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
                    XEXP (XVECEXP (src, 0, i), 0));
}

/* Move a group of registers represented by a PARALLEL into pseudos.  */

rtx
emit_group_move_into_temps (rtx src)
{
  rtvec vec = rtvec_alloc (XVECLEN (src, 0));
  int i;

  for (i = 0; i < XVECLEN (src, 0); i++)
    {
      rtx e = XVECEXP (src, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
        e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (src), vec);
}
/* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
   where SRC is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_DST, or -1 if not
   known.  */

void
emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
{
  rtx *tmps, dst;
  int start, i;
  enum machine_mode m = GET_MODE (orig_dst);

  gcc_assert (GET_CODE (src) == PARALLEL);

  if (!SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
    {
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
      if (imode == BLKmode)
        dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
      else
        dst = gen_reg_rtx (imode);
      emit_group_store (dst, src, type, ssize);
      if (imode != BLKmode)
        dst = gen_lowpart (GET_MODE (orig_dst), dst);
      emit_move_insn (orig_dst, dst);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = alloca (sizeof (rtx) * XVECLEN (src, 0));

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      tmps[i] = gen_reg_rtx (GET_MODE (reg));
      emit_move_insn (tmps[i], reg);
    }

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
         a return statement.  In that case, the dst and src are the same,
         so no action is necessary.  */
      if (rtx_equal_p (dst, src))
        return;

      /* It is unclear if we can ever reach here, but we may as well handle
         it.  Allocate a temporary, and split this into a store/load to/from
         the temporary.  */

      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
      emit_group_store (temp, src, type, ssize);
      emit_group_load (dst, temp, type, ssize);
      return;
    }
  else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
    {
      dst = gen_reg_rtx (GET_MODE (orig_dst));
      /* Make life a bit easier for combine.  */
      emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
    }

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);
      rtx dest = dst;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        {
          /* store_bit_field always takes its value from the lsb.
             Move the fragment to the lsb if it's not already there.  */
          if (
#ifdef BLOCK_REG_PADDING
              BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
              == (BYTES_BIG_ENDIAN ? upward : downward)
#else
              BYTES_BIG_ENDIAN
#endif
              )
            {
              int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
              tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
                                      build_int_cst (NULL_TREE, shift),
                                      tmps[i], 0);
            }
          bytelen = ssize - bytepos;
        }

      if (GET_CODE (dst) == CONCAT)
        {
          if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
            dest = XEXP (dst, 0);
          else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
            {
              bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
              dest = XEXP (dst, 1);
            }
          else
            {
              gcc_assert (bytepos == 0 && XVECLEN (src, 0));
              dest = assign_stack_temp (GET_MODE (dest),
                                        GET_MODE_SIZE (GET_MODE (dest)), 0);
              emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
                              tmps[i]);
              dst = dest;
              break;
            }
        }

      /* Optimize the access just a bit.  */
      if (MEM_P (dest)
          && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
              || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
          && bytelen == GET_MODE_SIZE (mode))
        emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
      else
        store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
                         mode, tmps[i]);
    }

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (orig_dst != dst)
    emit_move_insn (orig_dst, dst);
}
/* Generate code to copy a BLKmode object of TYPE out of a
   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
   is null, a stack temporary is created.  TGTBLK is returned.

   The purpose of this routine is to handle functions that return
   BLKmode structures in registers.  Some machines (the PA for example)
   want to return all small structures in registers regardless of the
   structure's alignment.  */

rtx
copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
{
  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
  rtx src = NULL, dst = NULL;
  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
  unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;

  if (tgtblk == 0)
    {
      tgtblk = assign_temp (build_qualified_type (type,
                                                  (TYPE_QUALS (type)
                                                   | TYPE_QUAL_CONST)),
                            0, 1, 1);
      preserve_temp_slots (tgtblk);
    }

  /* This code assumes srcreg is at least a full word.  If it isn't, copy it
     into a new pseudo which is a full word.  */

  if (GET_MODE (srcreg) != BLKmode
      && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
    srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));

  /* If the structure doesn't take up a whole number of words, see whether
     SRCREG is padded on the left or on the right.  If it's on the left,
     set PADDING_CORRECTION to the number of bits to skip.

     In most ABIs, the structure will be returned at the least significant
     end of the register, which translates to right padding on little-endian
     targets and left padding on big-endian targets.  The opposite
     holds if the structure is returned at the most significant
     end of the register.  */
  if (bytes % UNITS_PER_WORD != 0
      && (targetm.calls.return_in_msb (type)
          ? !BYTES_BIG_ENDIAN
          : BYTES_BIG_ENDIAN))
    padding_correction
      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));

  /* Copy the structure BITSIZE bits at a time.

     We could probably emit more efficient code for machines which do not use
     strict alignment, but it doesn't seem worth the effort at the current
     time.  */
  for (bitpos = 0, xbitpos = padding_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
    {
      /* We need a new source operand each time xbitpos is on a
         word boundary and when xbitpos == padding_correction
         (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0
          || xbitpos == padding_correction)
        src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
                                     GET_MODE (srcreg));

      /* We need a new destination operand each time bitpos is on
         a word boundary.  */
      if (bitpos % BITS_PER_WORD == 0)
        dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);

      /* Use xbitpos for the source extraction (right justified) and
         bitpos for the destination store (left justified).  */
      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
                       extract_bit_field (src, bitsize,
                                          xbitpos % BITS_PER_WORD, 1,
                                          NULL_RTX, word_mode, word_mode));
    }

  return tgtblk;
}
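/* Illustration only (not part of the original source): with 4-byte
   words and a 6-byte struct returned in the most significant end of
   the register on a little-endian target, bytes % UNITS_PER_WORD = 2,
   so padding_correction = 32 - 2 * 8 = 16: the first 16 bits of the
   first register are padding and are skipped via XBITPOS.  */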
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (rtx *call_fusage, rtx reg)
{
  gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);

  *call_fusage
    = gen_rtx_EXPR_LIST (VOIDmode,
                         gen_rtx_USE (VOIDmode, reg), *call_fusage);
}

/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (rtx *call_fusage, int regno, int nregs)
{
  int i;

  gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, regno_reg_rtx[regno + i]);
}

/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (rtx *call_fusage, rtx regs)
{
  int i;

  for (i = 0; i < XVECLEN (regs, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
         registers.  This can also be a MEM for targets that pass values
         partially on the stack and partially in registers.  */
      if (reg != 0 && REG_P (reg))
        use_reg (call_fusage, reg);
    }
}
2087 /* Determine whether the LEN bytes generated by CONSTFUN can be
2088 stored to memory using several move instructions. CONSTFUNDATA is
2089 a pointer which will be passed as argument in every CONSTFUN call.
2090 ALIGN is maximum alignment we can assume. Return nonzero if a
2091 call to store_by_pieces should succeed. */
2094 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2095 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2096 void *constfundata, unsigned int align)
2098 unsigned HOST_WIDE_INT l;
2099 unsigned int max_size;
2100 HOST_WIDE_INT offset = 0;
2101 enum machine_mode mode, tmode;
2102 enum insn_code icode;
2109 if (! STORE_BY_PIECES_P (len, align))
2112 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2113 if (align >= GET_MODE_ALIGNMENT (tmode))
2114 align = GET_MODE_ALIGNMENT (tmode);
2117 enum machine_mode xmode;
2119 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2121 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2122 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2123 || SLOW_UNALIGNED_ACCESS (tmode, align))
2126 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2129 /* We would first store what we can in the largest integer mode, then go to
2130 successively smaller modes. */
2133 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2138 max_size = STORE_MAX_PIECES + 1;
2139 while (max_size > 1)
2141 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2142 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2143 if (GET_MODE_SIZE (tmode) < max_size)
2146 if (mode == VOIDmode)
2149 icode = mov_optab->handlers[(int) mode].insn_code;
2150 if (icode != CODE_FOR_nothing
2151 && align >= GET_MODE_ALIGNMENT (mode))
2153 unsigned int size = GET_MODE_SIZE (mode);
2160 cst = (*constfun) (constfundata, offset, mode);
2161 if (!LEGITIMATE_CONSTANT_P (cst))
2171 max_size = GET_MODE_SIZE (mode);
2174 /* The code above should have handled everything. */
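/* Illustrative sketch of a CONSTFUN callback (modeled on clear_by_pieces_1
   below; the name example_constfun is hypothetical):

     static rtx
     example_constfun (void *data ATTRIBUTE_UNUSED,
                       HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                       enum machine_mode mode)
     {
       return CONST0_RTX (mode);
     }

   can_store_by_pieces (len, example_constfun, NULL, align) then asks
   whether LEN zero bytes could be stored piecewise at alignment ALIGN.  */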
2181 /* Generate several move instructions to store LEN bytes generated by
2182 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2183 pointer which will be passed as argument in every CONSTFUN call.
2184 ALIGN is maximum alignment we can assume.
2185 If ENDP is 0 return TO, if ENDP is 1 return memory at the end a la
2186 mempcpy, and if ENDP is 2 return memory at the end minus one byte a la
stpcpy.  */
2190 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2191 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2192 void *constfundata, unsigned int align, int endp)
2194 struct store_by_pieces data;
2198 gcc_assert (endp != 2);
2202 gcc_assert (STORE_BY_PIECES_P (len, align));
2203 data.constfun = constfun;
2204 data.constfundata = constfundata;
2207 store_by_pieces_1 (&data, align);
2212 gcc_assert (!data.reverse);
2217 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2218 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2220 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2223 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2230 to1 = adjust_address (data.to, QImode, data.offset);
2238 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2239 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2242 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2244 struct store_by_pieces data;
2249 data.constfun = clear_by_pieces_1;
2250 data.constfundata = NULL;
2253 store_by_pieces_1 (&data, align);
2256 /* Callback routine for clear_by_pieces.
2257 Return const0_rtx unconditionally. */
2260 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2261 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2262 enum machine_mode mode ATTRIBUTE_UNUSED)
2267 /* Subroutine of clear_by_pieces and store_by_pieces.
2268 Generate several move instructions to store LEN bytes of block TO. (A MEM
2269 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2272 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2273 unsigned int align ATTRIBUTE_UNUSED)
2275 rtx to_addr = XEXP (data->to, 0);
2276 unsigned int max_size = STORE_MAX_PIECES + 1;
2277 enum machine_mode mode = VOIDmode, tmode;
2278 enum insn_code icode;
2281 data->to_addr = to_addr;
2283 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2284 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2286 data->explicit_inc_to = 0;
2288 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2290 data->offset = data->len;
2292 /* If storing requires more than two move insns,
2293 copy addresses to registers (to make displacements shorter)
2294 and use post-increment if available. */
2295 if (!data->autinc_to
2296 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2298 /* Determine the main mode we'll be using. */
2299 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2300 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2301 if (GET_MODE_SIZE (tmode) < max_size)
2304 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2306 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2307 data->autinc_to = 1;
2308 data->explicit_inc_to = -1;
2311 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2312 && ! data->autinc_to)
2314 data->to_addr = copy_addr_to_reg (to_addr);
2315 data->autinc_to = 1;
2316 data->explicit_inc_to = 1;
2319 if (!data->autinc_to && CONSTANT_P (to_addr))
2320 data->to_addr = copy_addr_to_reg (to_addr);
2323 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2324 if (align >= GET_MODE_ALIGNMENT (tmode))
2325 align = GET_MODE_ALIGNMENT (tmode);
2328 enum machine_mode xmode;
2330 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2332 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2333 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2334 || SLOW_UNALIGNED_ACCESS (tmode, align))
2337 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2340 /* First store what we can in the largest integer mode, then go to
2341 successively smaller modes. */
2343 while (max_size > 1)
2345 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2346 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2347 if (GET_MODE_SIZE (tmode) < max_size)
2350 if (mode == VOIDmode)
2353 icode = mov_optab->handlers[(int) mode].insn_code;
2354 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2355 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2357 max_size = GET_MODE_SIZE (mode);
2360 /* The code above should have handled everything. */
2361 gcc_assert (!data->len);
2364 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2365 with move instructions for mode MODE. GENFUN is the gen_... function
2366 to make a move insn for that mode. DATA has all the other info. */
2369 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2370 struct store_by_pieces *data)
2372 unsigned int size = GET_MODE_SIZE (mode);
2375 while (data->len >= size)
2378 data->offset -= size;
2380 if (data->autinc_to)
2381 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2384 to1 = adjust_address (data->to, mode, data->offset);
2386 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2387 emit_insn (gen_add2_insn (data->to_addr,
2388 GEN_INT (-(HOST_WIDE_INT) size)));
2390 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2391 emit_insn ((*genfun) (to1, cst));
2393 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2394 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2396 if (! data->reverse)
2397 data->offset += size;
2403 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2404 its length in bytes. */
2407 clear_storage (rtx object, rtx size)
2409 enum machine_mode mode = GET_MODE (object);
2412 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2413 just move a zero. Otherwise, do this a piece at a time. */
2415 && GET_CODE (size) == CONST_INT
2416 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2418 rtx zero = CONST0_RTX (mode);
2421 emit_move_insn (object, zero);
2425 if (COMPLEX_MODE_P (mode))
2427 zero = CONST0_RTX (GET_MODE_INNER (mode));
2430 write_complex_part (object, zero, 0);
2431 write_complex_part (object, zero, 1);
2437 if (size == const0_rtx)
2440 align = MEM_ALIGN (object);
2442 if (GET_CODE (size) == CONST_INT
2443 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2444 clear_by_pieces (object, INTVAL (size), align);
2445 else if (clear_storage_via_clrmem (object, size, align))
2448 return clear_storage_via_libcall (object, size);
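/* Illustrative sketch (hypothetical sizes, not from this file): zeroing a
   32-byte BLKmode temporary could look like

     rtx blk = assign_stack_temp (BLKmode, 32, 0);
     clear_storage (blk, GEN_INT (32));

   which resolves, via the checks above, to clear_by_pieces, a clrmem
   pattern, or the memset libcall.  */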
2453 /* A subroutine of clear_storage. Expand a clrmem pattern;
2454 return true if successful. */
2457 clear_storage_via_clrmem (rtx object, rtx size, unsigned int align)
2459 /* Try the most limited insn first, because there's no point
2460 including more than one in the machine description unless
2461 the more limited one has some advantage. */
2463 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2464 enum machine_mode mode;
2466 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2467 mode = GET_MODE_WIDER_MODE (mode))
2469 enum insn_code code = clrmem_optab[(int) mode];
2470 insn_operand_predicate_fn pred;
2472 if (code != CODE_FOR_nothing
2473 /* We don't need MODE to be narrower than
2474 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2475 the mode mask, as it is returned by the macro, it will
2476 definitely be less than the actual mode mask. */
2477 && ((GET_CODE (size) == CONST_INT
2478 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2479 <= (GET_MODE_MASK (mode) >> 1)))
2480 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2481 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2482 || (*pred) (object, BLKmode))
2483 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2484 || (*pred) (opalign, VOIDmode)))
2487 rtx last = get_last_insn ();
2490 op1 = convert_to_mode (mode, size, 1);
2491 pred = insn_data[(int) code].operand[1].predicate;
2492 if (pred != 0 && ! (*pred) (op1, mode))
2493 op1 = copy_to_mode_reg (mode, op1);
2495 pat = GEN_FCN ((int) code) (object, op1, opalign);
2502 delete_insns_since (last);
2509 /* A subroutine of clear_storage. Expand a call to memset.
2510 Return the return value of memset, 0 otherwise. */
2513 clear_storage_via_libcall (rtx object, rtx size)
2515 tree call_expr, arg_list, fn, object_tree, size_tree;
2516 enum machine_mode size_mode;
2519 /* Emit code to copy OBJECT and SIZE into new pseudos.  We can then
2520 place those new pseudos into a VAR_DECL and use them later.  */
2522 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2524 size_mode = TYPE_MODE (sizetype);
2525 size = convert_to_mode (size_mode, size, 1);
2526 size = copy_to_mode_reg (size_mode, size);
2528 /* It is incorrect to use the libcall calling conventions to call
2529 memset in this context. This could be a user call to memset and
2530 the user may wish to examine the return value from memset. For
2531 targets where libcalls and normal calls have different conventions
2532 for returning pointers, we could end up generating incorrect code. */
2534 object_tree = make_tree (ptr_type_node, object);
2535 size_tree = make_tree (sizetype, size);
2537 fn = clear_storage_libcall_fn (true);
2538 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2539 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
2540 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2542 /* Now we have to build up the CALL_EXPR itself. */
2543 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2544 call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2545 call_expr, arg_list, NULL_TREE);
2547 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2552 /* A subroutine of clear_storage_via_libcall. Create the tree node
2553 for the function we use for block clears. The first time FOR_CALL
2554 is true, we call assemble_external. */
2556 static GTY(()) tree block_clear_fn;
2559 init_block_clear_fn (const char *asmspec)
2561 if (!block_clear_fn)
2565 fn = get_identifier ("memset");
2566 args = build_function_type_list (ptr_type_node, ptr_type_node,
2567 integer_type_node, sizetype,
2570 fn = build_decl (FUNCTION_DECL, fn, args);
2571 DECL_EXTERNAL (fn) = 1;
2572 TREE_PUBLIC (fn) = 1;
2573 DECL_ARTIFICIAL (fn) = 1;
2574 TREE_NOTHROW (fn) = 1;
2576 block_clear_fn = fn;
2580 set_user_assembler_name (block_clear_fn, asmspec);
2584 clear_storage_libcall_fn (int for_call)
2586 static bool emitted_extern;
2588 if (!block_clear_fn)
2589 init_block_clear_fn (NULL);
2591 if (for_call && !emitted_extern)
2593 emitted_extern = true;
2594 make_decl_rtl (block_clear_fn);
2595 assemble_external (block_clear_fn);
2598 return block_clear_fn;
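/* For reference, the FUNCTION_DECL built by init_block_clear_fn corresponds
   to the C prototype

     void *memset (void *, int, size_t);

   which is why the normal call conventions, rather than the libcall
   conventions, are used when expanding the call above.  */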
2601 /* Write to one of the components of the complex value CPLX. Write VAL to
2602 the real part if IMAG_P is false, and the imaginary part if it's true.  */
2605 write_complex_part (rtx cplx, rtx val, bool imag_p)
2607 enum machine_mode cmode;
2608 enum machine_mode imode;
2611 if (GET_CODE (cplx) == CONCAT)
2613 emit_move_insn (XEXP (cplx, imag_p), val);
2617 cmode = GET_MODE (cplx);
2618 imode = GET_MODE_INNER (cmode);
2619 ibitsize = GET_MODE_BITSIZE (imode);
2621 /* If the sub-object is at least word sized, then we know that subregging
2622 will work. This special case is important, since store_bit_field
2623 wants to operate on integer modes, and there's rarely an OImode to
2624 correspond to TCmode. */
2625 if (ibitsize >= BITS_PER_WORD
2626 /* For hard regs we have exact predicates. Assume we can split
2627 the original object if it spans an even number of hard regs.
2628 This special case is important for SCmode on 64-bit platforms
2629 where the natural size of floating-point regs is 32-bit. */
2630 || (GET_CODE (cplx) == REG
2631 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2632 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0)
2633 /* For MEMs we always try to make a "subreg", that is to adjust
2634 the MEM, because store_bit_field may generate overly
2635 convoluted RTL for sub-word fields. */
2638 rtx part = simplify_gen_subreg (imode, cplx, cmode,
2639 imag_p ? GET_MODE_SIZE (imode) : 0);
2642 emit_move_insn (part, val);
2646 /* simplify_gen_subreg may fail for sub-word MEMs. */
2647 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2650 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val);
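/* Worked example (illustrative): for SCmode the inner mode is SFmode, so
   IBITSIZE is 32 and the imaginary part lives at byte offset
   GET_MODE_SIZE (SFmode) == 4; the subreg path above then reduces to a
   plain SFmode move at byte offset 0 or 4.  */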
2653 /* Extract one of the components of the complex value CPLX. Extract the
2654 real part if IMAG_P is false, and the imaginary part if it's true. */
2657 read_complex_part (rtx cplx, bool imag_p)
2659 enum machine_mode cmode, imode;
2662 if (GET_CODE (cplx) == CONCAT)
2663 return XEXP (cplx, imag_p);
2665 cmode = GET_MODE (cplx);
2666 imode = GET_MODE_INNER (cmode);
2667 ibitsize = GET_MODE_BITSIZE (imode);
2669 /* Special case reads from complex constants that got spilled to memory. */
2670 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
2672 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2673 if (decl && TREE_CODE (decl) == COMPLEX_CST)
2675 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
2676 if (CONSTANT_CLASS_P (part))
2677 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
2681 /* If the sub-object is at least word sized, then we know that subregging
2682 will work. This special case is important, since extract_bit_field
2683 wants to operate on integer modes, and there's rarely an OImode to
2684 correspond to TCmode. */
2685 if (ibitsize >= BITS_PER_WORD
2686 /* For hard regs we have exact predicates. Assume we can split
2687 the original object if it spans an even number of hard regs.
2688 This special case is important for SCmode on 64-bit platforms
2689 where the natural size of floating-point regs is 32-bit. */
2690 || (GET_CODE (cplx) == REG
2691 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2692 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0)
2693 /* For MEMs we always try to make a "subreg", that is to adjust
2694 the MEM, because extract_bit_field may generate overly
2695 convoluted RTL for sub-word fields. */
2698 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
2699 imag_p ? GET_MODE_SIZE (imode) : 0);
2703 /* simplify_gen_subreg may fail for sub-word MEMs. */
2704 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2707 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
2708 true, NULL_RTX, imode, imode);
2711 /* A subroutine of emit_move_via_alt_mode. Yet another lowpart generator.
2712 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
2713 represented in NEW_MODE. */
2716 emit_move_change_mode (enum machine_mode new_mode,
2717 enum machine_mode old_mode, rtx x)
2721 if (reload_in_progress && MEM_P (x))
2723 /* We can't use gen_lowpart here because it may call change_address
2724 which is not appropriate if we were called when a reload was in
2725 progress. We don't have to worry about changing the address since
2726 the size in bytes is supposed to be the same. Copy the MEM to
2727 change the mode and move any substitutions from the old MEM to the new one.  */
2730 ret = adjust_address_nv (x, new_mode, 0);
2731 copy_replacements (x, ret);
2735 /* Note that we do want simplify_subreg's behaviour of validating
2736 that the new mode is ok for a hard register. If we were to use
2737 simplify_gen_subreg, we would create the subreg, but would
2738 probably run into the target not being able to implement it. */
2739 ret = simplify_subreg (new_mode, x, old_mode, 0);
2745 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
2746 ALT_MODE instead of the operand's natural mode, MODE. CODE is the insn
2747 code for the move in ALT_MODE, and is known to be valid. Returns the
2748 instruction emitted, or NULL if X or Y cannot be represented in ALT_MODE. */
2751 emit_move_via_alt_mode (enum machine_mode alt_mode, enum machine_mode mode,
2752 enum insn_code code, rtx x, rtx y)
2754 x = emit_move_change_mode (alt_mode, mode, x);
2757 y = emit_move_change_mode (alt_mode, mode, y);
2760 return emit_insn (GEN_FCN (code) (x, y));
2763 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
2764 an integer mode of the same size as MODE. Returns the instruction
2765 emitted, or NULL if such a move could not be generated. */
2768 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y)
2770 enum machine_mode imode;
2771 enum insn_code code;
2773 /* There must exist a mode of the exact size we require. */
2774 imode = int_mode_for_mode (mode);
2775 if (imode == BLKmode)
2778 /* The target must support moves in this mode. */
2779 code = mov_optab->handlers[imode].insn_code;
2780 if (code == CODE_FOR_nothing)
2783 return emit_move_via_alt_mode (imode, mode, code, x, y);
2786 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
2787 Return an equivalent MEM that does not use an auto-increment. */
2790 emit_move_resolve_push (enum machine_mode mode, rtx x)
2792 enum rtx_code code = GET_CODE (XEXP (x, 0));
2793 HOST_WIDE_INT adjust;
2796 adjust = GET_MODE_SIZE (mode);
2797 #ifdef PUSH_ROUNDING
2798 adjust = PUSH_ROUNDING (adjust);
2800 if (code == PRE_DEC || code == POST_DEC)
2803 /* Do not use anti_adjust_stack, since we don't want to update
2804 stack_pointer_delta. */
2805 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
2806 GEN_INT (adjust), stack_pointer_rtx,
2807 0, OPTAB_LIB_WIDEN);
2808 if (temp != stack_pointer_rtx)
2809 emit_move_insn (stack_pointer_rtx, temp);
2815 temp = stack_pointer_rtx;
2818 temp = plus_constant (stack_pointer_rtx, -GET_MODE_SIZE (mode));
2821 temp = plus_constant (stack_pointer_rtx, GET_MODE_SIZE (mode));
2827 return replace_equiv_address (x, temp);
2830 /* A subroutine of emit_move_complex. Generate a move from Y into X.
2831 X is known to satisfy push_operand, and MODE is known to be complex.
2832 Returns the last instruction emitted. */
2835 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
2837 enum machine_mode submode = GET_MODE_INNER (mode);
2840 #ifdef PUSH_ROUNDING
2841 unsigned int submodesize = GET_MODE_SIZE (submode);
2843 /* In case we output to the stack, but the size is smaller than the
2844 machine can push exactly, we need to use move instructions. */
2845 if (PUSH_ROUNDING (submodesize) != submodesize)
2847 x = emit_move_resolve_push (mode, x);
2848 return emit_move_insn (x, y);
2852 /* Note that the real part always precedes the imag part in memory
2853 regardless of the machine's endianness.  */
2854 switch (GET_CODE (XEXP (x, 0)))
2868 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2869 read_complex_part (y, imag_first));
2870 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2871 read_complex_part (y, !imag_first));
2874 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
2875 MODE is known to be complex. Returns the last instruction emitted. */
2878 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
2882 /* Need to take special care for pushes, to maintain proper ordering
2883 of the data, and possibly extra padding. */
2884 if (push_operand (x, mode))
2885 return emit_move_complex_push (mode, x, y);
2887 /* For memory to memory moves, optimal behaviour can be had with the
2888 existing block move logic. */
2889 if (MEM_P (x) && MEM_P (y))
2891 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
2892 BLOCK_OP_NO_LIBCALL);
2893 return get_last_insn ();
2896 /* See if we can coerce the target into moving both values at once. */
2898 /* Not possible if the values are inherently not adjacent. */
2899 if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
2901 /* Is possible if both are registers (or subregs of registers). */
2902 else if (register_operand (x, mode) && register_operand (y, mode))
2904 /* If one of the operands is a memory, and alignment constraints
2905 are friendly enough, we may be able to do combined memory operations.
2906 We do not attempt this if Y is a constant because that combination is
2907 usually better with the by-parts thing below. */
2908 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
2909 && (!STRICT_ALIGNMENT
2910 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
2917 rtx ret = emit_move_via_integer (mode, x, y);
2922 /* Show the output dies here. This is necessary for SUBREGs
2923 of pseudos since we cannot track their lifetimes correctly;
2924 hard regs shouldn't appear here except as return values. */
2925 if (!reload_completed && !reload_in_progress
2926 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
2927 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2929 write_complex_part (x, read_complex_part (y, false), false);
2930 write_complex_part (x, read_complex_part (y, true), true);
2931 return get_last_insn ();
2934 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
2935 MODE is known to be MODE_CC. Returns the last instruction emitted. */
2938 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
2942 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
2945 enum insn_code code = mov_optab->handlers[CCmode].insn_code;
2946 if (code != CODE_FOR_nothing)
2947 return emit_move_via_alt_mode (CCmode, mode, code, x, y);
2950 /* Otherwise, find the MODE_INT mode of the same width. */
2951 ret = emit_move_via_integer (mode, x, y);
2952 gcc_assert (ret != NULL);
2956 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
2957 MODE is any multi-word or full-word mode that lacks a move_insn
2958 pattern. Note that you will get better code if you define such
2959 patterns, even if they must turn into multiple assembler instructions. */
2962 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
2969 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
2971 /* If X is a push on the stack, do the push now and replace
2972 X with a reference to the stack pointer. */
2973 if (push_operand (x, mode))
2974 x = emit_move_resolve_push (mode, x);
2976 /* If we are in reload, see if either operand is a MEM whose address
2977 is scheduled for replacement. */
2978 if (reload_in_progress && MEM_P (x)
2979 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
2980 x = replace_equiv_address_nv (x, inner);
2981 if (reload_in_progress && MEM_P (y)
2982 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
2983 y = replace_equiv_address_nv (y, inner);
2987 need_clobber = false;
2989 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2992 rtx xpart = operand_subword (x, i, 1, mode);
2993 rtx ypart = operand_subword (y, i, 1, mode);
2995 /* If we can't get a part of Y, put Y into memory if it is a
2996 constant. Otherwise, force it into a register. If we still
2997 can't get a part of Y, abort. */
2998 if (ypart == 0 && CONSTANT_P (y))
3000 y = force_const_mem (mode, y);
3001 ypart = operand_subword (y, i, 1, mode);
3003 else if (ypart == 0)
3004 ypart = operand_subword_force (y, i, mode);
3006 gcc_assert (xpart && ypart);
3008 need_clobber |= (GET_CODE (xpart) == SUBREG);
3010 last_insn = emit_move_insn (xpart, ypart);
3016 /* Show the output dies here. This is necessary for SUBREGs
3017 of pseudos since we cannot track their lifetimes correctly;
3018 hard regs shouldn't appear here except as return values.
3019 We never want to emit such a clobber after reload. */
3021 && ! (reload_in_progress || reload_completed)
3022 && need_clobber != 0)
3023 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
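/* Worked example (illustrative): a DImode move on a 32-bit target that has
   no movdi pattern is split here into two SImode moves, one per word, with
   operand_subword picking out each four-byte piece.  */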
3030 /* Low level part of emit_move_insn.
3031 Called just like emit_move_insn, but assumes X and Y
3032 are basically valid. */
3035 emit_move_insn_1 (rtx x, rtx y)
3037 enum machine_mode mode = GET_MODE (x);
3038 enum insn_code code;
3040 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3042 code = mov_optab->handlers[mode].insn_code;
3043 if (code != CODE_FOR_nothing)
3044 return emit_insn (GEN_FCN (code) (x, y));
3046 /* Expand complex moves by moving real part and imag part. */
3047 if (COMPLEX_MODE_P (mode))
3048 return emit_move_complex (mode, x, y);
3050 if (GET_MODE_CLASS (mode) == MODE_CC)
3051 return emit_move_ccmode (mode, x, y);
3053 /* Try using a move pattern for the corresponding integer mode. This is
3054 only safe when simplify_subreg can convert MODE constants into integer
3055 constants. At present, it can only do this reliably if the value
3056 fits within a HOST_WIDE_INT. */
3057 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3059 rtx ret = emit_move_via_integer (mode, x, y);
3064 return emit_move_multi_word (mode, x, y);
3067 /* Generate code to copy Y into X.
3068 Both Y and X must have the same mode, except that
3069 Y can be a constant with VOIDmode.
3070 This mode cannot be BLKmode; use emit_block_move for that.
3072 Return the last instruction emitted. */
3075 emit_move_insn (rtx x, rtx y)
3077 enum machine_mode mode = GET_MODE (x);
3078 rtx y_cst = NULL_RTX;
3081 gcc_assert (mode != BLKmode
3082 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3087 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3088 && (last_insn = compress_float_constant (x, y)))
3093 if (!LEGITIMATE_CONSTANT_P (y))
3095 y = force_const_mem (mode, y);
3097 /* If the target's cannot_force_const_mem prevented the spill,
3098 assume that the target's move expanders will also take care
3099 of the non-legitimate constant. */
3105 /* If X or Y are memory references, verify that their addresses are valid for the machine.  */
3108 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3109 && ! push_operand (x, GET_MODE (x)))
3111 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
3112 x = validize_mem (x);
3115 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3117 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
3118 y = validize_mem (y);
3120 gcc_assert (mode != BLKmode);
3122 last_insn = emit_move_insn_1 (x, y);
3124 if (y_cst && REG_P (x)
3125 && (set = single_set (last_insn)) != NULL_RTX
3126 && SET_DEST (set) == x
3127 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3128 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3133 /* If Y is representable exactly in a narrower mode, and the target can
3134 perform the extension directly from constant or memory, then emit the
3135 move as an extension. */
3138 compress_float_constant (rtx x, rtx y)
3140 enum machine_mode dstmode = GET_MODE (x);
3141 enum machine_mode orig_srcmode = GET_MODE (y);
3142 enum machine_mode srcmode;
3145 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3147 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3148 srcmode != orig_srcmode;
3149 srcmode = GET_MODE_WIDER_MODE (srcmode))
3152 rtx trunc_y, last_insn;
3154 /* Skip if the target can't extend this way. */
3155 ic = can_extend_p (dstmode, srcmode, 0);
3156 if (ic == CODE_FOR_nothing)
3159 /* Skip if the narrowed value isn't exact. */
3160 if (! exact_real_truncate (srcmode, &r))
3163 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3165 if (LEGITIMATE_CONSTANT_P (trunc_y))
3167 /* Skip if the target needs extra instructions to perform the extension.  */
3169 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3172 else if (float_extend_from_mem[dstmode][srcmode])
3173 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3177 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3178 last_insn = get_last_insn ();
3181 set_unique_reg_note (last_insn, REG_EQUAL, y);
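/* Worked example (illustrative): storing the DFmode constant 1.0, which
   truncates exactly to SFmode, can be emitted as the standard extendsfdf2
   pattern applied to the SFmode constant, provided the target can extend
   directly from a constant or from memory as tested above.  */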
3189 /* Pushing data onto the stack. */
3191 /* Push a block of length SIZE (perhaps variable)
3192 and return an rtx to address the beginning of the block.
3193 The value may be virtual_outgoing_args_rtx.
3195 EXTRA is the number of bytes of padding to push in addition to SIZE.
3196 BELOW nonzero means this padding comes at low addresses;
3197 otherwise, the padding comes at high addresses. */
3200 push_block (rtx size, int extra, int below)
3204 size = convert_modes (Pmode, ptr_mode, size, 1);
3205 if (CONSTANT_P (size))
3206 anti_adjust_stack (plus_constant (size, extra));
3207 else if (REG_P (size) && extra == 0)
3208 anti_adjust_stack (size);
3211 temp = copy_to_mode_reg (Pmode, size);
3213 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3214 temp, 0, OPTAB_LIB_WIDEN);
3215 anti_adjust_stack (temp);
3218 #ifndef STACK_GROWS_DOWNWARD
3224 temp = virtual_outgoing_args_rtx;
3225 if (extra != 0 && below)
3226 temp = plus_constant (temp, extra);
3230 if (GET_CODE (size) == CONST_INT)
3231 temp = plus_constant (virtual_outgoing_args_rtx,
3232 -INTVAL (size) - (below ? 0 : extra));
3233 else if (extra != 0 && !below)
3234 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3235 negate_rtx (Pmode, plus_constant (size, extra)));
3237 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3238 negate_rtx (Pmode, size));
3241 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3244 #ifdef PUSH_ROUNDING
3246 /* Emit single push insn. */
3249 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3252 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3254 enum insn_code icode;
3255 insn_operand_predicate_fn pred;
3257 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3258 /* If there is a push pattern, use it.  Otherwise try the old way of throwing
3259 a MEM representing the push operation to the move expander.  */
3260 icode = push_optab->handlers[(int) mode].insn_code;
3261 if (icode != CODE_FOR_nothing)
3263 if (((pred = insn_data[(int) icode].operand[0].predicate)
3264 && !((*pred) (x, mode))))
3265 x = force_reg (mode, x);
3266 emit_insn (GEN_FCN (icode) (x));
3269 if (GET_MODE_SIZE (mode) == rounded_size)
3270 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3271 /* If we are to pad downward, adjust the stack pointer first and
3272 then store X into the stack location using an offset. This is
3273 because emit_move_insn does not know how to pad; it does not have access to the type.  */
3275 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3277 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3278 HOST_WIDE_INT offset;
3280 emit_move_insn (stack_pointer_rtx,
3281 expand_binop (Pmode,
3282 #ifdef STACK_GROWS_DOWNWARD
3288 GEN_INT (rounded_size),
3289 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3291 offset = (HOST_WIDE_INT) padding_size;
3292 #ifdef STACK_GROWS_DOWNWARD
3293 if (STACK_PUSH_CODE == POST_DEC)
3294 /* We have already decremented the stack pointer, so get the previous value.  */
3296 offset += (HOST_WIDE_INT) rounded_size;
3298 if (STACK_PUSH_CODE == POST_INC)
3299 /* We have already incremented the stack pointer, so get the previous value.  */
3301 offset -= (HOST_WIDE_INT) rounded_size;
3303 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3307 #ifdef STACK_GROWS_DOWNWARD
3308 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3309 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3310 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3312 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3313 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3314 GEN_INT (rounded_size));
3316 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3319 dest = gen_rtx_MEM (mode, dest_addr);
3323 set_mem_attributes (dest, type, 1);
3325 if (flag_optimize_sibling_calls)
3326 /* Function incoming arguments may overlap with sibling call
3327 outgoing arguments and we cannot allow reordering of reads
3328 from function arguments with stores to outgoing arguments
3329 of sibling calls. */
3330 set_mem_alias_set (dest, 0);
3332 emit_move_insn (dest, x);
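/* Worked example (illustrative, hypothetical sizes): if PUSH_ROUNDING
   rounds a 6-byte mode up to 8 bytes and the argument pads downward on a
   downward-growing stack, the code above first drops the stack pointer by
   8 and then stores through sp + 2 (sp + 10 if the decrement was already
   done by a POST_DEC push).  */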
3336 /* Generate code to push X onto the stack, assuming it has mode MODE and type TYPE.
3338 MODE is redundant except when X is a CONST_INT (since they don't carry mode info).
3340 SIZE is an rtx for the size of data to be copied (in bytes),
3341 needed only if X is BLKmode.
3343 ALIGN (in bits) is maximum alignment we can assume.
3345 If PARTIAL and REG are both nonzero, then copy that many of the first
3346 words of X into registers starting with REG, and push the rest of X.
3347 The amount of space pushed is decreased by PARTIAL words,
3348 rounded *down* to a multiple of PARM_BOUNDARY.
3349 REG must be a hard register in this case.
3350 If REG is zero but PARTIAL is not, take all other actions for an
3351 argument partially in registers, but do not actually load any registers.
3354 EXTRA is the amount in bytes of extra space to leave next to this arg.
3355 This is ignored if an argument block has already been allocated.
3357 On a machine that lacks real push insns, ARGS_ADDR is the address of
3358 the bottom of the argument block for this call. We use indexing off there
3359 to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
3360 argument block has not been preallocated.
3362 ARGS_SO_FAR is the size of args previously pushed for this call.
3364 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3365 for arguments passed in registers. If nonzero, it will be the number
3366 of bytes required. */
3369 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3370 unsigned int align, int partial, rtx reg, int extra,
3371 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3375 enum direction stack_direction
3376 #ifdef STACK_GROWS_DOWNWARD
3382 /* Decide where to pad the argument: `downward' for below,
3383 `upward' for above, or `none' for don't pad it.
3384 Default is below for small data on big-endian machines; else above. */
3385 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3387 /* Invert direction if stack is post-decrement.
3389 if (STACK_PUSH_CODE == POST_DEC)
3390 if (where_pad != none)
3391 where_pad = (where_pad == downward ? upward : downward);
3395 if (mode == BLKmode)
3397 /* Copy a block into the stack, entirely or partially. */
3400 int used = partial * UNITS_PER_WORD;
3404 if (reg && GET_CODE (reg) == PARALLEL)
3406 /* Use the size of the elt to compute offset. */
3407 rtx elt = XEXP (XVECEXP (reg, 0, 0), 0);
3408 used = partial * GET_MODE_SIZE (GET_MODE (elt));
3409 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3412 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3418 /* USED is now the # of bytes we need not copy to the stack
3419 because registers will take care of them. */
3422 xinner = adjust_address (xinner, BLKmode, used);
3424 /* If the partial register-part of the arg counts in its stack size,
3425 skip the part of stack space corresponding to the registers.
3426 Otherwise, start copying to the beginning of the stack space,
3427 by setting SKIP to 0. */
3428 skip = (reg_parm_stack_space == 0) ? 0 : used;
3430 #ifdef PUSH_ROUNDING
3431 /* Do it with several push insns if that doesn't take lots of insns
3432 and if there is no difficulty with push insns that skip bytes
3433 on the stack for alignment purposes. */
3436 && GET_CODE (size) == CONST_INT
3438 && MEM_ALIGN (xinner) >= align
3439 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3440 /* Here we avoid the case of a structure whose weak alignment
3441 forces many pushes of a small amount of data,
3442 and such small pushes do rounding that causes trouble. */
3443 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3444 || align >= BIGGEST_ALIGNMENT
3445 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3446 == (align / BITS_PER_UNIT)))
3447 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3449 /* Push padding now if padding above and stack grows down,
3450 or if padding below and stack grows up.
3451 But if space already allocated, this has already been done. */
3452 if (extra && args_addr == 0
3453 && where_pad != none && where_pad != stack_direction)
3454 anti_adjust_stack (GEN_INT (extra));
3456 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3459 #endif /* PUSH_ROUNDING */
3463 /* Otherwise make space on the stack and copy the data
3464 to the address of that space. */
3466 /* Deduct words put into registers from the size we must copy. */
3469 if (GET_CODE (size) == CONST_INT)
3470 size = GEN_INT (INTVAL (size) - used);
3472 size = expand_binop (GET_MODE (size), sub_optab, size,
3473 GEN_INT (used), NULL_RTX, 0,
3477 /* Get the address of the stack space.
3478 In this case, we do not deal with EXTRA separately.
3479 A single stack adjust will do. */
3482 temp = push_block (size, extra, where_pad == downward);
3485 else if (GET_CODE (args_so_far) == CONST_INT)
3486 temp = memory_address (BLKmode,
3487 plus_constant (args_addr,
3488 skip + INTVAL (args_so_far)));
3490 temp = memory_address (BLKmode,
3491 plus_constant (gen_rtx_PLUS (Pmode,
3496 if (!ACCUMULATE_OUTGOING_ARGS)
3498 /* If the source is referenced relative to the stack pointer,
3499 copy it to another register to stabilize it. We do not need
3500 to do this if we know that we won't be changing sp. */
3502 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3503 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3504 temp = copy_to_reg (temp);
3507 target = gen_rtx_MEM (BLKmode, temp);
3509 /* We do *not* set_mem_attributes here, because incoming arguments
3510 may overlap with sibling call outgoing arguments and we cannot
3511 allow reordering of reads from function arguments with stores
3512 to outgoing arguments of sibling calls. We do, however, want
3513 to record the alignment of the stack slot. */
3514 /* ALIGN may well be better aligned than TYPE, e.g. due to
3515 PARM_BOUNDARY. Assume the caller isn't lying. */
3516 set_mem_align (target, align);
3518 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3521 else if (partial > 0)
3523 /* Scalar partly in registers. */
3525 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3528 /* # words of start of argument
3529 that we must make space for but need not store. */
3530 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3531 int args_offset = INTVAL (args_so_far);
3534 /* Push padding now if padding above and stack grows down,
3535 or if padding below and stack grows up.
3536 But if space already allocated, this has already been done. */
3537 if (extra && args_addr == 0
3538 && where_pad != none && where_pad != stack_direction)
3539 anti_adjust_stack (GEN_INT (extra));
3541 /* If we make space by pushing it, we might as well push
3542 the real data. Otherwise, we can leave OFFSET nonzero
3543 and leave the space uninitialized. */
3547 /* Now NOT_STACK gets the number of words that we don't need to
3548 allocate on the stack. */
3549 not_stack = partial - offset;
3551 /* If the partial register-part of the arg counts in its stack size,
3552 skip the part of stack space corresponding to the registers.
3553 Otherwise, start copying to the beginning of the stack space,
3554 by setting SKIP to 0. */
3555 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3557 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3558 x = validize_mem (force_const_mem (mode, x));
3560 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3561 SUBREGs of such registers are not allowed. */
3562 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3563 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3564 x = copy_to_reg (x);
3566 /* Loop over all the words allocated on the stack for this arg. */
3567 /* We can do it by words, because any scalar bigger than a word
3568 has a size a multiple of a word. */
3569 #ifndef PUSH_ARGS_REVERSED
3570 for (i = not_stack; i < size; i++)
3572 for (i = size - 1; i >= not_stack; i--)
3574 if (i >= not_stack + offset)
3575 emit_push_insn (operand_subword_force (x, i, mode),
3576 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3578 GEN_INT (args_offset + ((i - not_stack + skip)
3580 reg_parm_stack_space, alignment_pad);
3587 /* Push padding now if padding above and stack grows down,
3588 or if padding below and stack grows up.
3589 But if space already allocated, this has already been done. */
3590 if (extra && args_addr == 0
3591 && where_pad != none && where_pad != stack_direction)
3592 anti_adjust_stack (GEN_INT (extra));
3594 #ifdef PUSH_ROUNDING
3595 if (args_addr == 0 && PUSH_ARGS)
3596 emit_single_push_insn (mode, x, type);
3600 if (GET_CODE (args_so_far) == CONST_INT)
3602 = memory_address (mode,
3603 plus_constant (args_addr,
3604 INTVAL (args_so_far)));
3606 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3608 dest = gen_rtx_MEM (mode, addr);
3610 /* We do *not* set_mem_attributes here, because incoming arguments
3611 may overlap with sibling call outgoing arguments and we cannot
3612 allow reordering of reads from function arguments with stores
3613 to outgoing arguments of sibling calls. We do, however, want
3614 to record the alignment of the stack slot. */
3615 /* ALIGN may well be better aligned than TYPE, e.g. due to
3616 PARM_BOUNDARY. Assume the caller isn't lying. */
3617 set_mem_align (dest, align);
3619 emit_move_insn (dest, x);
3623 /* If part should go in registers, copy that part
3624 into the appropriate registers. Do this now, at the end,
3625 since mem-to-mem copies above may do function calls. */
3626 if (partial > 0 && reg != 0)
3628 /* Handle calls that pass values in multiple non-contiguous locations.
3629 The Irix 6 ABI has examples of this. */
3630 if (GET_CODE (reg) == PARALLEL)
3631 emit_group_load (reg, x, type, -1);
3633 move_block_to_reg (REGNO (reg), x, partial, mode);
3636 if (extra && args_addr == 0 && where_pad == stack_direction)
3637 anti_adjust_stack (GEN_INT (extra));
3639 if (alignment_pad && args_addr == 0)
3640 anti_adjust_stack (alignment_pad);
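/* Illustrative sketch (hypothetical operands, not from this file): pushing
   a plain SImode value with no partial-register part and no preallocated
   argument block might look like

     emit_push_insn (val, SImode, integer_type_node, NULL_RTX,
                     GET_MODE_ALIGNMENT (SImode), 0, NULL_RTX, 0,
                     NULL_RTX, const0_rtx, 0, NULL_RTX);

   which on a target with push insns reduces to a single
   emit_single_push_insn call.  */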
3643 /* Return X if X can be used as a subtarget in a sequence of arithmetic operations.  */
3647 get_subtarget (rtx x)
3651 /* Only registers can be subtargets. */
3653 /* Don't use hard regs to avoid extending their life. */
3654 || REGNO (x) < FIRST_PSEUDO_REGISTER
3658 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
3659 FIELD is a bitfield. Returns true if the optimization was successful,
3660 and there's nothing else to do. */
3663 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
3664 unsigned HOST_WIDE_INT bitpos,
3665 enum machine_mode mode1, rtx str_rtx,
3668 enum machine_mode str_mode = GET_MODE (str_rtx);
3669 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
3674 if (mode1 != VOIDmode
3675 || bitsize >= BITS_PER_WORD
3676 || str_bitsize > BITS_PER_WORD
3677 || TREE_SIDE_EFFECTS (to)
3678 || TREE_THIS_VOLATILE (to))
3682 if (!BINARY_CLASS_P (src)
3683 || TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
3686 op0 = TREE_OPERAND (src, 0);
3687 op1 = TREE_OPERAND (src, 1);
3690 if (!operand_equal_p (to, op0, 0))
3693 if (MEM_P (str_rtx))
3695 unsigned HOST_WIDE_INT offset1;
3697 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
3698 str_mode = word_mode;
3699 str_mode = get_best_mode (bitsize, bitpos,
3700 MEM_ALIGN (str_rtx), str_mode, 0);
3701 if (str_mode == VOIDmode)
3703 str_bitsize = GET_MODE_BITSIZE (str_mode);
3706 bitpos %= str_bitsize;
3707 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
3708 str_rtx = adjust_address (str_rtx, str_mode, offset1);
3710 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
3713 /* If the bit field covers the whole REG/MEM, store_field
3714 will likely generate better code. */
3715 if (bitsize >= str_bitsize)
3718 /* We can't handle fields split across multiple entities. */
3719 if (bitpos + bitsize > str_bitsize)
3722 if (BYTES_BIG_ENDIAN)
3723 bitpos = str_bitsize - bitpos - bitsize;
3725 switch (TREE_CODE (src))
3729 /* For now, just optimize the case of the topmost bitfield
3730 where we don't need to do any masking and also
3731 1 bit bitfields where xor can be used.
3732 We might win by one instruction for the other bitfields
3733 too if insv/extv instructions aren't used, so that
3734 can be added later. */
3735 if (bitpos + bitsize != str_bitsize
3736 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
3739 value = expand_expr (op1, NULL_RTX, str_mode, 0);
3740 value = convert_modes (str_mode,
3741 TYPE_MODE (TREE_TYPE (op1)), value,
3742 TYPE_UNSIGNED (TREE_TYPE (op1)));
3744 /* We may be accessing data outside the field, which means
3745 we can alias adjacent data. */
3746 if (MEM_P (str_rtx))
3748 str_rtx = shallow_copy_rtx (str_rtx);
3749 set_mem_alias_set (str_rtx, 0);
3750 set_mem_expr (str_rtx, 0);
3753 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
3754 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
3756 value = expand_and (str_mode, value, const1_rtx, NULL);
3759 value = expand_shift (LSHIFT_EXPR, str_mode, value,
3760 build_int_cst (NULL_TREE, bitpos),
3762 result = expand_binop (str_mode, binop, str_rtx,
3763 value, str_rtx, 1, OPTAB_WIDEN);
3764 if (result != str_rtx)
3765 emit_move_insn (str_rtx, result);
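/* Worked example (illustrative): given struct { unsigned lo : 24, hi : 8; }
   on a little-endian 32-bit target, "s.hi += n" hits the topmost-bitfield
   case above: the value of N is shifted left by 24 and added with a
   full-word add; carries out of the field simply fall off the top of the
   word, so no masking is needed.  */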
3776 /* Expand an assignment that stores the value of FROM into TO. */
3779 expand_assignment (tree to, tree from)
3784 /* Don't crash if the lhs of the assignment was erroneous. */
3786 if (TREE_CODE (to) == ERROR_MARK)
3788 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3792 /* Assignment of a structure component needs special treatment
3793 if the structure component's rtx is not simply a MEM.
3794 Assignment of an array element at a constant index, and assignment of
3795 an array element in an unaligned packed structure field, has the same problem.  */
3797 if (handled_component_p (to)
3798 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
3800 enum machine_mode mode1;
3801 HOST_WIDE_INT bitsize, bitpos;
3809 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3810 &unsignedp, &volatilep, true);
3812 /* If we are going to use store_bit_field and extract_bit_field,
3813 make sure to_rtx will be safe for multiple use. */
3815 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3819 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
3821 gcc_assert (MEM_P (to_rtx));
3823 #ifdef POINTERS_EXTEND_UNSIGNED
3824 if (GET_MODE (offset_rtx) != Pmode)
3825 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
3827 if (GET_MODE (offset_rtx) != ptr_mode)
3828 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3831 /* A constant address in TO_RTX can have VOIDmode; we must not try
3832 to call force_reg for that case, so avoid it.  */
3834 && GET_MODE (to_rtx) == BLKmode
3835 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3837 && (bitpos % bitsize) == 0
3838 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3839 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3841 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3845 to_rtx = offset_address (to_rtx, offset_rtx,
3846 highest_pow2_factor_for_target (to,
3850 /* Handle expand_expr of a complex value returning a CONCAT. */
3851 if (GET_CODE (to_rtx) == CONCAT)
3853 if (TREE_CODE (TREE_TYPE (from)) == COMPLEX_TYPE)
3855 gcc_assert (bitpos == 0);
3856 result = store_expr (from, to_rtx, false);
3860 gcc_assert (bitpos == 0 || bitpos == GET_MODE_BITSIZE (mode1));
3861 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false);
3868 /* If the field is at offset zero, we could have been given the
3869 DECL_RTX of the parent struct. Don't munge it. */
3870 to_rtx = shallow_copy_rtx (to_rtx);
3872 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
3874 /* Deal with volatile and readonly fields. The former is only
3875 done for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3877 MEM_VOLATILE_P (to_rtx) = 1;
3878 if (component_uses_parent_alias_set (to))
3879 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3882 if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1,
3886 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3887 TREE_TYPE (tem), get_alias_set (to));
3891 preserve_temp_slots (result);
3897 /* If the rhs is a function call and its value is not an aggregate,
3898 call the function before we start to compute the lhs.
3899 This is needed for correct code for cases such as
3900 val = setjmp (buf) on machines where reference to val
3901 requires loading up part of an address in a separate insn.
3903 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3904 since it might be a promoted variable where the zero- or sign- extension
3905 needs to be done. Handling this in the normal way is safe because no
3906 computation is done before the call. */
3907 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
3908 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3909 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3910 && REG_P (DECL_RTL (to))))
3915 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3917 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3919 /* Handle calls that return values in multiple non-contiguous locations.
3920 The Irix 6 ABI has examples of this. */
3921 if (GET_CODE (to_rtx) == PARALLEL)
3922 emit_group_load (to_rtx, value, TREE_TYPE (from),
3923 int_size_in_bytes (TREE_TYPE (from)));
3924 else if (GET_MODE (to_rtx) == BLKmode)
3925 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
3928 if (POINTER_TYPE_P (TREE_TYPE (to)))
3929 value = convert_memory_address (GET_MODE (to_rtx), value);
3930 emit_move_insn (to_rtx, value);
3932 preserve_temp_slots (to_rtx);
3938 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3939 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3942 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3944 /* Don't move directly into a return register. */
3945 if (TREE_CODE (to) == RESULT_DECL
3946 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
3951 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3953 if (GET_CODE (to_rtx) == PARALLEL)
3954 emit_group_load (to_rtx, temp, TREE_TYPE (from),
3955 int_size_in_bytes (TREE_TYPE (from)));
3957 emit_move_insn (to_rtx, temp);
3959 preserve_temp_slots (to_rtx);
3965 /* In case we are returning the contents of an object which overlaps
3966 the place the value is being stored, use a safe function when copying
3967 a value through a pointer into a structure value return block. */
3968 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3969 && current_function_returns_struct
3970 && !current_function_returns_pcc_struct)
3975 size = expr_size (from);
3976 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
3978 emit_library_call (memmove_libfunc, LCT_NORMAL,
3979 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3980 XEXP (from_rtx, 0), Pmode,
3981 convert_to_mode (TYPE_MODE (sizetype),
3982 size, TYPE_UNSIGNED (sizetype)),
3983 TYPE_MODE (sizetype));
3985 preserve_temp_slots (to_rtx);
3991 /* Compute FROM and store the value in the rtx we got. */
3994 result = store_expr (from, to_rtx, 0);
3995 preserve_temp_slots (result);
4001 /* Generate code for computing expression EXP,
4002 and storing the value into TARGET.
4004 If the mode is BLKmode then we may return TARGET itself.
4005 It turns out that in BLKmode it doesn't cause a problem,
4006 because C has no operators that could combine two different
4007 assignments into the same BLKmode object with different values
4008 with no sequence point.  Will other languages need this to be more thorough?
4011 If CALL_PARAM_P is nonzero, this is a store into a call param on the
4012 stack, and block moves may need to be treated specially. */
4015 store_expr (tree exp, rtx target, int call_param_p)
4018 rtx alt_rtl = NULL_RTX;
4019 int dont_return_target = 0;
4021 if (VOID_TYPE_P (TREE_TYPE (exp)))
4023 /* C++ can generate ?: expressions with a throw expression in one
4024 branch and an rvalue in the other. Here, we resolve attempts to
4025 store the throw expression's nonexistent result. */
4026 gcc_assert (!call_param_p);
4027 expand_expr (exp, const0_rtx, VOIDmode, 0);
4030 if (TREE_CODE (exp) == COMPOUND_EXPR)
4032 /* Perform the first part of the compound expression, then assign from the second part.  */
4034 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4035 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4036 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
4038 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4040 /* For conditional expression, get safe form of the target. Then
4041 test the condition, doing the appropriate assignment on either
4042 side. This avoids the creation of unnecessary temporaries.
4043 For non-BLKmode, it is more efficient not to do this. */
4045 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4047 do_pending_stack_adjust ();
4049 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4050 store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
4051 emit_jump_insn (gen_jump (lab2));
4054 store_expr (TREE_OPERAND (exp, 2), target, call_param_p);
4060 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4061 /* If this is a scalar in a register that is stored in a wider mode
4062 than the declared mode, compute the result into its declared mode
4063 and then convert to the wider mode.  Our value is the computed expression.  */
4066 rtx inner_target = 0;
4068 /* We can do the conversion inside EXP, which will often result
4069 in some optimizations. Do the conversion in two steps: first
4070 change the signedness, if needed, then the extend. But don't
4071 do this if the type of EXP is a subtype of something else
4072 since then the conversion might involve more than just
4073 converting modes. */
4074 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
4075 && TREE_TYPE (TREE_TYPE (exp)) == 0
4076 && (!lang_hooks.reduce_bit_field_operations
4077 || (GET_MODE_PRECISION (GET_MODE (target))
4078 == TYPE_PRECISION (TREE_TYPE (exp)))))
4080 if (TYPE_UNSIGNED (TREE_TYPE (exp))
4081 != SUBREG_PROMOTED_UNSIGNED_P (target))
4083 (lang_hooks.types.signed_or_unsigned_type
4084 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4086 exp = convert (lang_hooks.types.type_for_mode
4087 (GET_MODE (SUBREG_REG (target)),
4088 SUBREG_PROMOTED_UNSIGNED_P (target)),
4091 inner_target = SUBREG_REG (target);
4094 temp = expand_expr (exp, inner_target, VOIDmode,
4095 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4097 /* If TEMP is a VOIDmode constant, use convert_modes to make
4098 sure that we properly convert it. */
4099 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4101 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4102 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4103 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4104 GET_MODE (target), temp,
4105 SUBREG_PROMOTED_UNSIGNED_P (target));
4108 convert_move (SUBREG_REG (target), temp,
4109 SUBREG_PROMOTED_UNSIGNED_P (target));
4115 temp = expand_expr_real (exp, target, GET_MODE (target),
4117 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4119 /* Return TARGET if it's a specified hardware register.
4120 If TARGET is a volatile mem ref, either return TARGET
4121 or return a reg copied *from* TARGET; ANSI requires this.
4123 Otherwise, if TEMP is not TARGET, return TEMP
4124 if it is constant (for efficiency),
4125 or if we really want the correct value. */
4126 if (!(target && REG_P (target)
4127 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4128 && !(MEM_P (target) && MEM_VOLATILE_P (target))
4129 && ! rtx_equal_p (temp, target)
4130 && CONSTANT_P (temp))
4131 dont_return_target = 1;
4134 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4135 the same as that of TARGET, adjust the constant. This is needed, for
4136 example, in case it is a CONST_DOUBLE and we want only a word-sized value. */
4138 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4139 && TREE_CODE (exp) != ERROR_MARK
4140 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4141 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4142 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
4144 /* If value was not generated in the target, store it there.
4145 Convert the value to TARGET's type first if necessary and emit the
4146 pending incrementations that have been queued when expanding EXP.
4147 Note that we cannot emit the whole queue blindly because this will
4148 effectively disable the POST_INC optimization later.
4150 If TEMP and TARGET compare equal according to rtx_equal_p, but
4151 one or both of them are volatile memory refs, we have to distinguish two cases:
4153 - expand_expr has used TARGET. In this case, we must not generate
4154 another copy. This can be detected by TEMP being equal to TARGET according to ==.
4156 - expand_expr has not used TARGET - that means that the source just
4157 happens to have the same RTX form. Since temp will have been created
4158 by expand_expr, it will compare unequal according to == .
4159 We must generate a copy in this case, to reach the correct number
4160 of volatile memory references. */
4162 if ((! rtx_equal_p (temp, target)
4163 || (temp != target && (side_effects_p (temp)
4164 || side_effects_p (target))))
4165 && TREE_CODE (exp) != ERROR_MARK
4166 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4167 but TARGET is not a valid memory reference, TEMP will differ
4168 from TARGET although it is really the same location. */
4169 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4170 /* If there's nothing to copy, don't bother. Don't call expr_size
4171 unless necessary, because the expr_size hook of some front ends (C++)
4172 aborts on objects that are not supposed to be bit-copied or bit-initialized. */
4174 && expr_size (exp) != const0_rtx)
4176 if (GET_MODE (temp) != GET_MODE (target)
4177 && GET_MODE (temp) != VOIDmode)
4179 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4180 if (dont_return_target)
4182 /* In this case, we will return TEMP,
4183 so make sure it has the proper mode.
4184 But don't forget to store the value into TARGET. */
4185 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4186 emit_move_insn (target, temp);
4189 convert_move (target, temp, unsignedp);
4192 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4194 /* Handle copying a string constant into an array. The string
4195 constant may be shorter than the array. So copy just the string's
4196 actual length, and clear the rest. First get the size of the data
4197 type of the string, which is actually the size of the target. */
4198 rtx size = expr_size (exp);
4200 if (GET_CODE (size) == CONST_INT
4201 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4202 emit_block_move (target, temp, size,
4204 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4207 /* Compute the size of the data to copy from the string. */
4209 = size_binop (MIN_EXPR,
4210 make_tree (sizetype, size),
4211 size_int (TREE_STRING_LENGTH (exp)));
4213 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4215 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4218 /* Copy that much. */
4219 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4220 TYPE_UNSIGNED (sizetype));
4221 emit_block_move (target, temp, copy_size_rtx,
4223 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4225 /* Figure out how much is left in TARGET that we have to clear.
4226 Do all calculations in ptr_mode. */
4227 if (GET_CODE (copy_size_rtx) == CONST_INT)
4229 size = plus_constant (size, -INTVAL (copy_size_rtx));
4230 target = adjust_address (target, BLKmode,
4231 INTVAL (copy_size_rtx));
4235 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4236 copy_size_rtx, NULL_RTX, 0,
4239 #ifdef POINTERS_EXTEND_UNSIGNED
4240 if (GET_MODE (copy_size_rtx) != Pmode)
4241 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4242 TYPE_UNSIGNED (sizetype));
4245 target = offset_address (target, copy_size_rtx,
4246 highest_pow2_factor (copy_size));
4247 label = gen_label_rtx ();
4248 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4249 GET_MODE (size), 0, label);
4252 if (size != const0_rtx)
4253 clear_storage (target, size);
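/* Illustrative sketch, not GCC source: a string constant shorter than its
   array.  TREE_STRING_LENGTH is 6 here (it includes the NUL), so
   emit_block_move copies 6 bytes and clear_storage zeroes the remaining
   10 bytes of the target.  */
#if 0
void
example_string (void)
{
  char buf[16] = "hello";	/* copy 6 bytes, clear the other 10 */
  (void) buf[0];
}
#endif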
4259 /* Handle calls that return values in multiple non-contiguous locations.
4260 The Irix 6 ABI has examples of this. */
4261 else if (GET_CODE (target) == PARALLEL)
4262 emit_group_load (target, temp, TREE_TYPE (exp),
4263 int_size_in_bytes (TREE_TYPE (exp)));
4264 else if (GET_MODE (temp) == BLKmode)
4265 emit_block_move (target, temp, expr_size (exp),
4267 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4270 temp = force_operand (temp, target);
4272 emit_move_insn (target, temp);
4279 /* Examine CTOR to discover:
4280 * how many scalar fields are set to nonzero values,
4281 and place it in *P_NZ_ELTS;
4282 * how many scalar fields are set to non-constant values,
4283 and place it in *P_NC_ELTS; and
4284 * how many scalar fields in total are in CTOR,
4285 and place it in *P_ELT_COUNT. */
4288 categorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts,
4289 HOST_WIDE_INT *p_nc_elts,
4290 HOST_WIDE_INT *p_elt_count)
4292 HOST_WIDE_INT nz_elts, nc_elts, elt_count;
4299 for (list = CONSTRUCTOR_ELTS (ctor); list; list = TREE_CHAIN (list))
4301 tree value = TREE_VALUE (list);
4302 tree purpose = TREE_PURPOSE (list);
4306 if (TREE_CODE (purpose) == RANGE_EXPR)
4308 tree lo_index = TREE_OPERAND (purpose, 0);
4309 tree hi_index = TREE_OPERAND (purpose, 1);
4311 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4312 mult = (tree_low_cst (hi_index, 1)
4313 - tree_low_cst (lo_index, 1) + 1);
4316 switch (TREE_CODE (value))
4320 HOST_WIDE_INT nz = 0, nc = 0, count = 0;
4321 categorize_ctor_elements_1 (value, &nz, &nc, &count);
4322 nz_elts += mult * nz;
4323 nc_elts += mult * nc;
4324 elt_count += mult * count;
4330 if (!initializer_zerop (value))
4336 nz_elts += mult * TREE_STRING_LENGTH (value);
4337 elt_count += mult * TREE_STRING_LENGTH (value);
4341 if (!initializer_zerop (TREE_REALPART (value)))
4343 if (!initializer_zerop (TREE_IMAGPART (value)))
4351 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4353 if (!initializer_zerop (TREE_VALUE (v)))
4363 if (!initializer_constant_valid_p (value, TREE_TYPE (value)))
4369 *p_nz_elts += nz_elts;
4370 *p_nc_elts += nc_elts;
4371 *p_elt_count += elt_count;
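/* Illustrative sketch, not GCC source: roughly what the routine above
   reports for a simple constructor.  For the initializer below it would
   compute *P_ELT_COUNT = 4 (four scalar fields), *P_NZ_ELTS = 2 (the
   values 1 and K are not known to be zero), and *P_NC_ELTS = 1 (K is
   not a valid constant initializer).  */
#if 0
extern int k;
struct s { int a, b, c, d; };

void
example_categorize (void)
{
  struct s v = { 1, 0, k, 0 };
  (void) v;
}
#endif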
4375 categorize_ctor_elements (tree ctor, HOST_WIDE_INT *p_nz_elts,
4376 HOST_WIDE_INT *p_nc_elts,
4377 HOST_WIDE_INT *p_elt_count)
4382 categorize_ctor_elements_1 (ctor, p_nz_elts, p_nc_elts, p_elt_count);
4385 /* Count the number of scalars in TYPE. Return -1 on overflow or if TYPE is variable-sized. */
4389 count_type_elements (tree type)
4391 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
4392 switch (TREE_CODE (type))
4396 tree telts = array_type_nelts (type);
4397 if (telts && host_integerp (telts, 1))
4399 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
4400 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type));
4403 else if (max / n > m)
4411 HOST_WIDE_INT n = 0, t;
4414 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4415 if (TREE_CODE (f) == FIELD_DECL)
4417 t = count_type_elements (TREE_TYPE (f));
4427 case QUAL_UNION_TYPE:
4429 /* Ho hum. How in the world do we guess here? Clearly it isn't
4430 right to count the fields. Guess based on the number of words. */
4431 HOST_WIDE_INT n = int_size_in_bytes (type);
4434 return n / UNITS_PER_WORD;
4441 return TYPE_VECTOR_SUBPARTS (type);
4450 case REFERENCE_TYPE:
4463 /* Return 1 if EXP contains mostly (3/4) zeros. */
4466 mostly_zeros_p (tree exp)
4468 if (TREE_CODE (exp) == CONSTRUCTOR)
4471 HOST_WIDE_INT nz_elts, nc_elts, count, elts;
4473 categorize_ctor_elements (exp, &nz_elts, &nc_elts, &count);
4474 elts = count_type_elements (TREE_TYPE (exp));
4476 return nz_elts < elts / 4;
4479 return initializer_zerop (exp);
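/* Illustrative sketch, not GCC source: mostly_zeros_p in action.  The
   constructor below has one nonzero element against eight scalars in
   the type, and 1 < 8/4, so callers clear the whole object first and
   then store only the nonzero element.  */
#if 0
void
example_mostly_zeros (void)
{
  int a[8] = { 1 };	/* cleared as a block, then a[0] = 1 */
  (void) a[0];
}
#endif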
4482 /* Helper function for store_constructor.
4483 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4484 TYPE is the type of the CONSTRUCTOR, not the element type.
4485 CLEARED is as for store_constructor.
4486 ALIAS_SET is the alias set to use for any stores.
4488 This provides a recursive shortcut back to store_constructor when it isn't
4489 necessary to go through store_field. This is so that we can pass through
4490 the cleared field to let store_constructor know that we may not have to
4491 clear a substructure if the outer structure has already been cleared. */
4494 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4495 HOST_WIDE_INT bitpos, enum machine_mode mode,
4496 tree exp, tree type, int cleared, int alias_set)
4498 if (TREE_CODE (exp) == CONSTRUCTOR
4499 /* We can only call store_constructor recursively if the size and
4500 bit position are on a byte boundary. */
4501 && bitpos % BITS_PER_UNIT == 0
4502 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
4503 /* If we have a nonzero bitpos for a register target, then we just
4504 let store_field do the bitfield handling. This is unlikely to
4505 generate unnecessary clear instructions anyways. */
4506 && (bitpos == 0 || MEM_P (target)))
4510 = adjust_address (target,
4511 GET_MODE (target) == BLKmode
4513 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4514 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4517 /* Update the alias set, if required. */
4518 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
4519 && MEM_ALIAS_SET (target) != 0)
4521 target = copy_rtx (target);
4522 set_mem_alias_set (target, alias_set);
4525 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4528 store_field (target, bitsize, bitpos, mode, exp, type, alias_set);
4531 /* Store the value of constructor EXP into the rtx TARGET.
4532 TARGET is either a REG or a MEM; we know it cannot conflict, since
4533 safe_from_p has been called.
4534 CLEARED is true if TARGET is known to have been zero'd.
4535 SIZE is the number of bytes of TARGET we are allowed to modify: this
4536 may not be the same as the size of EXP if we are assigning to a field
4537 which has been packed to exclude padding bits. */
4540 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4542 tree type = TREE_TYPE (exp);
4543 #ifdef WORD_REGISTER_OPERATIONS
4544 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4547 switch (TREE_CODE (type))
4551 case QUAL_UNION_TYPE:
4555 /* If size is zero or the target is already cleared, do nothing. */
4556 if (size == 0 || cleared)
4558 /* We either clear the aggregate or indicate the value is dead. */
4559 else if ((TREE_CODE (type) == UNION_TYPE
4560 || TREE_CODE (type) == QUAL_UNION_TYPE)
4561 && ! CONSTRUCTOR_ELTS (exp))
4562 /* If the constructor is empty, clear the union. */
4564 clear_storage (target, expr_size (exp));
4568 /* If we are building a static constructor into a register,
4569 set the initial value as zero so we can fold the value into
4570 a constant. But if more than one register is involved,
4571 this probably loses. */
4572 else if (REG_P (target) && TREE_STATIC (exp)
4573 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4575 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4579 /* If the constructor has fewer fields than the structure or
4580 if we are initializing the structure to mostly zeros, clear
4581 the whole structure first. Don't do this if TARGET is a
4582 register whose mode size isn't equal to SIZE since
4583 clear_storage can't handle this case. */
4585 && ((list_length (CONSTRUCTOR_ELTS (exp))
4586 != fields_length (type))
4587 || mostly_zeros_p (exp))
4589 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4592 clear_storage (target, GEN_INT (size));
4597 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4599 /* Store each element of the constructor into the
4600 corresponding field of TARGET. */
4602 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4604 tree field = TREE_PURPOSE (elt);
4605 tree value = TREE_VALUE (elt);
4606 enum machine_mode mode;
4607 HOST_WIDE_INT bitsize;
4608 HOST_WIDE_INT bitpos = 0;
4610 rtx to_rtx = target;
4612 /* Just ignore missing fields. We cleared the whole
4613 structure, above, if any fields are missing. */
4617 if (cleared && initializer_zerop (value))
4620 if (host_integerp (DECL_SIZE (field), 1))
4621 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4625 mode = DECL_MODE (field);
4626 if (DECL_BIT_FIELD (field))
4629 offset = DECL_FIELD_OFFSET (field);
4630 if (host_integerp (offset, 0)
4631 && host_integerp (bit_position (field), 0))
4633 bitpos = int_bit_position (field);
4637 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4644 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
4645 make_tree (TREE_TYPE (exp),
4648 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4649 gcc_assert (MEM_P (to_rtx));
4651 #ifdef POINTERS_EXTEND_UNSIGNED
4652 if (GET_MODE (offset_rtx) != Pmode)
4653 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4655 if (GET_MODE (offset_rtx) != ptr_mode)
4656 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4659 to_rtx = offset_address (to_rtx, offset_rtx,
4660 highest_pow2_factor (offset));
4663 #ifdef WORD_REGISTER_OPERATIONS
4664 /* If this initializes a field that is smaller than a
4665 word, at the start of a word, try to widen it to a full
4666 word. This special case allows us to output C++ member
4667 function initializations in a form that the optimizers can understand. */
4670 && bitsize < BITS_PER_WORD
4671 && bitpos % BITS_PER_WORD == 0
4672 && GET_MODE_CLASS (mode) == MODE_INT
4673 && TREE_CODE (value) == INTEGER_CST
4675 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4677 tree type = TREE_TYPE (value);
4679 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4681 type = lang_hooks.types.type_for_size
4682 (BITS_PER_WORD, TYPE_UNSIGNED (type));
4683 value = convert (type, value);
4686 if (BYTES_BIG_ENDIAN)
4688 = fold (build2 (LSHIFT_EXPR, type, value,
4689 build_int_cst (NULL_TREE,
4690 BITS_PER_WORD - bitsize)));
4691 bitsize = BITS_PER_WORD;
4696 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4697 && DECL_NONADDRESSABLE_P (field))
4699 to_rtx = copy_rtx (to_rtx);
4700 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4703 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4704 value, type, cleared,
4705 get_alias_set (TREE_TYPE (field)));
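/* Illustrative sketch, not GCC source: a record constructor with fewer
   initializers than the record has fields.  list_length differs from
   fields_length, so the structure is cleared as a whole and the loop
   above then stores only the explicitly initialized field.  */
#if 0
struct pt { int x, y, z; };

void
example_record (void)
{
  struct pt p = { 5 };	/* clear P, then store 5 into p.x */
  (void) p.x;
}
#endif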
4715 tree elttype = TREE_TYPE (type);
4717 HOST_WIDE_INT minelt = 0;
4718 HOST_WIDE_INT maxelt = 0;
4720 domain = TYPE_DOMAIN (type);
4721 const_bounds_p = (TYPE_MIN_VALUE (domain)
4722 && TYPE_MAX_VALUE (domain)
4723 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4724 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4726 /* If we have constant bounds for the range of the type, get them. */
4729 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4730 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4733 /* If the constructor has fewer elements than the array, clear
4734 the whole array first. Similarly if this is a static
4735 constructor of a non-BLKmode object. */
4738 else if (REG_P (target) && TREE_STATIC (exp))
4742 HOST_WIDE_INT count = 0, zero_count = 0;
4743 need_to_clear = ! const_bounds_p;
4745 /* This loop is a more accurate version of the loop in
4746 mostly_zeros_p (it handles RANGE_EXPR in an index). It
4747 is also needed to check for missing elements. */
4748 for (elt = CONSTRUCTOR_ELTS (exp);
4749 elt != NULL_TREE && ! need_to_clear;
4750 elt = TREE_CHAIN (elt))
4752 tree index = TREE_PURPOSE (elt);
4753 HOST_WIDE_INT this_node_count;
4755 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4757 tree lo_index = TREE_OPERAND (index, 0);
4758 tree hi_index = TREE_OPERAND (index, 1);
4760 if (! host_integerp (lo_index, 1)
4761 || ! host_integerp (hi_index, 1))
4767 this_node_count = (tree_low_cst (hi_index, 1)
4768 - tree_low_cst (lo_index, 1) + 1);
4771 this_node_count = 1;
4773 count += this_node_count;
4774 if (mostly_zeros_p (TREE_VALUE (elt)))
4775 zero_count += this_node_count;
4778 /* Clear the entire array first if there are any missing
4779 elements, or if the incidence of zero elements is >= 75%. */
4782 && (count < maxelt - minelt + 1
4783 || 4 * zero_count >= 3 * count))
4787 if (need_to_clear && size > 0)
4790 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4792 clear_storage (target, GEN_INT (size));
4796 if (!cleared && REG_P (target))
4797 /* Inform later passes that the old value is dead. */
4798 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4800 /* Store each element of the constructor into the
4801 corresponding element of TARGET, determined by counting the elements. */
4803 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4805 elt = TREE_CHAIN (elt), i++)
4807 enum machine_mode mode;
4808 HOST_WIDE_INT bitsize;
4809 HOST_WIDE_INT bitpos;
4811 tree value = TREE_VALUE (elt);
4812 tree index = TREE_PURPOSE (elt);
4813 rtx xtarget = target;
4815 if (cleared && initializer_zerop (value))
4818 unsignedp = TYPE_UNSIGNED (elttype);
4819 mode = TYPE_MODE (elttype);
4820 if (mode == BLKmode)
4821 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4822 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4825 bitsize = GET_MODE_BITSIZE (mode);
4827 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4829 tree lo_index = TREE_OPERAND (index, 0);
4830 tree hi_index = TREE_OPERAND (index, 1);
4831 rtx index_r, pos_rtx;
4832 HOST_WIDE_INT lo, hi, count;
4835 /* If the range is constant and "small", unroll the loop. */
4837 && host_integerp (lo_index, 0)
4838 && host_integerp (hi_index, 0)
4839 && (lo = tree_low_cst (lo_index, 0),
4840 hi = tree_low_cst (hi_index, 0),
4841 count = hi - lo + 1,
4844 || (host_integerp (TYPE_SIZE (elttype), 1)
4845 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4848 lo -= minelt; hi -= minelt;
4849 for (; lo <= hi; lo++)
4851 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4854 && !MEM_KEEP_ALIAS_SET_P (target)
4855 && TREE_CODE (type) == ARRAY_TYPE
4856 && TYPE_NONALIASED_COMPONENT (type))
4858 target = copy_rtx (target);
4859 MEM_KEEP_ALIAS_SET_P (target) = 1;
4862 store_constructor_field
4863 (target, bitsize, bitpos, mode, value, type, cleared,
4864 get_alias_set (elttype));
4869 rtx loop_start = gen_label_rtx ();
4870 rtx loop_end = gen_label_rtx ();
4873 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4874 unsignedp = TYPE_UNSIGNED (domain);
4876 index = build_decl (VAR_DECL, NULL_TREE, domain);
4879 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4881 SET_DECL_RTL (index, index_r);
4882 store_expr (lo_index, index_r, 0);
4884 /* Build the head of the loop. */
4885 do_pending_stack_adjust ();
4886 emit_label (loop_start);
4888 /* Assign value to element index. */
4890 = convert (ssizetype,
4891 fold (build2 (MINUS_EXPR, TREE_TYPE (index),
4892 index, TYPE_MIN_VALUE (domain))));
4893 position = size_binop (MULT_EXPR, position,
4895 TYPE_SIZE_UNIT (elttype)));
4897 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4898 xtarget = offset_address (target, pos_rtx,
4899 highest_pow2_factor (position));
4900 xtarget = adjust_address (xtarget, mode, 0);
4901 if (TREE_CODE (value) == CONSTRUCTOR)
4902 store_constructor (value, xtarget, cleared,
4903 bitsize / BITS_PER_UNIT);
4905 store_expr (value, xtarget, 0);
4907 /* Generate a conditional jump to exit the loop. */
4908 exit_cond = build2 (LT_EXPR, integer_type_node,
4910 jumpif (exit_cond, loop_end);
4912 /* Update the loop counter, and jump to the head of the loop. */
4914 expand_assignment (index,
4915 build2 (PLUS_EXPR, TREE_TYPE (index),
4916 index, integer_one_node));
4918 emit_jump (loop_start);
4920 /* Build the end of the loop. */
4921 emit_label (loop_end);
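/* Illustrative sketch, not GCC source: GNU C designated range
   initializers produce the RANGE_EXPR indices handled above.  A small
   constant range is likely unrolled into individual stores; a large one
   is emitted as the runtime loop just built (index register, store,
   increment, conditional jump back to loop_start).  */
#if 0
void
example_range (void)
{
  int small[8] = { [0 ... 3] = 7 };		/* likely unrolled */
  int large[1024] = { [0 ... 1023] = 7 };	/* emitted as a loop */
  (void) small[0]; (void) large[0];
}
#endif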
4924 else if ((index != 0 && ! host_integerp (index, 0))
4925 || ! host_integerp (TYPE_SIZE (elttype), 1))
4930 index = ssize_int (1);
4933 index = fold_convert (ssizetype,
4934 fold (build2 (MINUS_EXPR,
4937 TYPE_MIN_VALUE (domain))));
4939 position = size_binop (MULT_EXPR, index,
4941 TYPE_SIZE_UNIT (elttype)));
4942 xtarget = offset_address (target,
4943 expand_expr (position, 0, VOIDmode, 0),
4944 highest_pow2_factor (position));
4945 xtarget = adjust_address (xtarget, mode, 0);
4946 store_expr (value, xtarget, 0);
4951 bitpos = ((tree_low_cst (index, 0) - minelt)
4952 * tree_low_cst (TYPE_SIZE (elttype), 1));
4954 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4956 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
4957 && TREE_CODE (type) == ARRAY_TYPE
4958 && TYPE_NONALIASED_COMPONENT (type))
4960 target = copy_rtx (target);
4961 MEM_KEEP_ALIAS_SET_P (target) = 1;
4963 store_constructor_field (target, bitsize, bitpos, mode, value,
4964 type, cleared, get_alias_set (elttype));
4976 tree elttype = TREE_TYPE (type);
4977 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
4978 enum machine_mode eltmode = TYPE_MODE (elttype);
4979 HOST_WIDE_INT bitsize;
4980 HOST_WIDE_INT bitpos;
4984 gcc_assert (eltmode != BLKmode);
4986 n_elts = TYPE_VECTOR_SUBPARTS (type);
4987 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
4989 enum machine_mode mode = GET_MODE (target);
4991 icode = (int) vec_init_optab->handlers[mode].insn_code;
4992 if (icode != CODE_FOR_nothing)
4996 vector = alloca (n_elts * sizeof (rtx)); /* room for N_ELTS rtx pointers */
4997 for (i = 0; i < n_elts; i++)
4998 vector [i] = CONST0_RTX (GET_MODE_INNER (mode));
5002 /* If the constructor has fewer elements than the vector,
5003 clear the whole vector first. Similarly if this is a static
5004 constructor of a non-BLKmode object. */
5007 else if (REG_P (target) && TREE_STATIC (exp))
5011 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
5013 for (elt = CONSTRUCTOR_ELTS (exp);
5015 elt = TREE_CHAIN (elt))
5017 int n_elts_here = tree_low_cst
5018 (int_const_binop (TRUNC_DIV_EXPR,
5019 TYPE_SIZE (TREE_TYPE (TREE_VALUE (elt))),
5020 TYPE_SIZE (elttype), 0), 1);
5022 count += n_elts_here;
5023 if (mostly_zeros_p (TREE_VALUE (elt)))
5024 zero_count += n_elts_here;
5027 /* Clear the entire vector first if there are any missing elements,
5028 or if the incidence of zero elements is >= 75%. */
5029 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
5032 if (need_to_clear && size > 0 && !vector)
5035 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5037 clear_storage (target, GEN_INT (size));
5041 if (!cleared && REG_P (target))
5042 /* Inform later passes that the old value is dead. */
5043 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5045 /* Store each element of the constructor into the corresponding
5046 element of TARGET, determined by counting the elements. */
5047 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
5049 elt = TREE_CHAIN (elt), i += bitsize / elt_size)
5051 tree value = TREE_VALUE (elt);
5052 tree index = TREE_PURPOSE (elt);
5053 HOST_WIDE_INT eltpos;
5055 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
5056 if (cleared && initializer_zerop (value))
5060 eltpos = tree_low_cst (index, 1);
5066 /* Vector CONSTRUCTORs should only be built from smaller
5067 vectors in the case of BLKmode vectors. */
5068 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
5069 vector[eltpos] = expand_expr (value, NULL_RTX, VOIDmode, 0);
5073 enum machine_mode value_mode =
5074 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
5075 ? TYPE_MODE (TREE_TYPE (value))
5077 bitpos = eltpos * elt_size;
5078 store_constructor_field (target, bitsize, bitpos,
5079 value_mode, value, type,
5080 cleared, get_alias_set (elttype));
5085 emit_insn (GEN_FCN (icode)
5087 gen_rtx_PARALLEL (GET_MODE (target),
5088 gen_rtvec_v (n_elts, vector))));
5097 /* Store the value of EXP (an expression tree)
5098 into a subfield of TARGET which has mode MODE and occupies
5099 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5100 If MODE is VOIDmode, it means that we are storing into a bit-field.
5102 Always return const0_rtx unless we have something particular to return.
5105 TYPE is the type of the underlying object,
5107 ALIAS_SET is the alias set for the destination. This value will
5108 (in general) be different from that for TARGET, since TARGET is a
5109 reference to the containing structure. */
5112 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5113 enum machine_mode mode, tree exp, tree type, int alias_set)
5115 HOST_WIDE_INT width_mask = 0;
5117 if (TREE_CODE (exp) == ERROR_MARK)
5118 return const0_rtx;
5120 /* If we have nothing to store, do nothing unless the expression has
5121 side-effects. */
5122 if (bitsize == 0)
5123 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5124 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5125 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5127 /* If we are storing into an unaligned field of an aligned union that is
5128 in a register, we may have the mode of TARGET being an integer mode but
5129 MODE == BLKmode. In that case, get an aligned object whose size and
5130 alignment are the same as TARGET and store TARGET into it (we can avoid
5131 the store if the field being stored is the entire width of TARGET). Then
5132 call ourselves recursively to store the field into a BLKmode version of
5133 that object. Finally, load from the object into TARGET. This is not
5134 very efficient in general, but should only be slightly more expensive
5135 than the otherwise-required unaligned accesses. Perhaps this can be
5136 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5137 twice, once with emit_move_insn and once via store_field. */
5140 && (REG_P (target) || GET_CODE (target) == SUBREG))
5142 rtx object = assign_temp (type, 0, 1, 1);
5143 rtx blk_object = adjust_address (object, BLKmode, 0);
5145 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5146 emit_move_insn (object, target);
5148 store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set);
5150 emit_move_insn (target, object);
5152 /* We want to return the BLKmode version of the data. */
5153 return blk_object;
5156 if (GET_CODE (target) == CONCAT)
5158 /* We're storing into a struct containing a single __complex. */
5160 gcc_assert (!bitpos);
5161 return store_expr (exp, target, 0);
5164 /* If the structure is in a register or if the component
5165 is a bit field, we cannot use addressing to access it.
5166 Use bit-field techniques or SUBREG to store in it. */
5168 if (mode == VOIDmode
5169 || (mode != BLKmode && ! direct_store[(int) mode]
5170 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5171 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5173 || GET_CODE (target) == SUBREG
5174 /* If the field isn't aligned enough to store as an ordinary memref,
5175 store it as a bit field. */
5177 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5178 || bitpos % GET_MODE_ALIGNMENT (mode))
5179 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5180 || (bitpos % BITS_PER_UNIT != 0)))
5181 /* If the RHS and field are a constant size and the size of the
5182 RHS isn't the same size as the bitfield, we must use bitfield operations. */
5185 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5186 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5188 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5190 /* If BITSIZE is narrower than the size of the type of EXP
5191 we will be narrowing TEMP. Normally, what's wanted are the
5192 low-order bits. However, if EXP's type is a record and this is a
5193 big-endian machine, we want the upper BITSIZE bits. */
5194 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5195 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5196 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5197 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5198 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5202 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE. */
5204 if (mode != VOIDmode && mode != BLKmode
5205 && mode != TYPE_MODE (TREE_TYPE (exp)))
5206 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5208 /* If the modes of TARGET and TEMP are both BLKmode, both
5209 must be in memory and BITPOS must be aligned on a byte
5210 boundary. If so, we simply do a block copy. */
5211 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5213 gcc_assert (MEM_P (target) && MEM_P (temp)
5214 && !(bitpos % BITS_PER_UNIT));
5216 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5217 emit_block_move (target, temp,
5218 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5225 /* Store the value in the bitfield. */
5226 store_bit_field (target, bitsize, bitpos, mode, temp);
5232 /* Now build a reference to just the desired component. */
5233 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5235 if (to_rtx == target)
5236 to_rtx = copy_rtx (to_rtx);
5238 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5239 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5240 set_mem_alias_set (to_rtx, alias_set);
5242 return store_expr (exp, to_rtx, 0);
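/* Illustrative sketch, not GCC source: a bit-field store that reaches
   the store_bit_field branch above.  MODE is VOIDmode for the 5-bit
   field, so the RHS is expanded to a register and inserted with
   bit-field techniques instead of through an ordinary memory
   reference.  */
#if 0
struct flags { unsigned a : 5; unsigned b : 3; };

void
example_bitfield (struct flags *f, unsigned v)
{
  f->a = v;	/* store_bit_field, not a plain move */
}
#endif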
5246 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5247 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5248 codes and find the ultimate containing object, which we return.
5250 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5251 bit position, and *PUNSIGNEDP to the signedness of the field.
5252 If the position of the field is variable, we store a tree
5253 giving the variable offset (in units) in *POFFSET.
5254 This offset is in addition to the bit position.
5255 If the position is not variable, we store 0 in *POFFSET.
5257 If any of the extraction expressions is volatile,
5258 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5260 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5261 is a mode that can be used to access the field. In that case, *PBITSIZE is redundant.
5264 If the field describes a variable-sized object, *PMODE is set to
5265 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5266 this case, but the address of the object can be found.
5268 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
5269 look through nodes that serve as markers of a greater alignment than
5270 the one that can be deduced from the expression. These nodes make it
5271 possible for front-ends to prevent temporaries from being created by
5272 the middle-end on alignment considerations. For that purpose, the
5273 normal operating mode at high-level is to always pass FALSE so that
5274 the ultimate containing object is really returned; moreover, the
5275 associated predicate handled_component_p will always return TRUE
5276 on these nodes, thus indicating that they are essentially handled
5277 by get_inner_reference. TRUE should only be passed when the caller
5278 is scanning the expression in order to build another representation
5279 and specifically knows how to handle these nodes; as such, this is
5280 the normal operating mode in the RTL expanders. */
5283 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5284 HOST_WIDE_INT *pbitpos, tree *poffset,
5285 enum machine_mode *pmode, int *punsignedp,
5286 int *pvolatilep, bool keep_aligning)
5289 enum machine_mode mode = VOIDmode;
5290 tree offset = size_zero_node;
5291 tree bit_offset = bitsize_zero_node;
5294 /* First get the mode, signedness, and size. We do this from just the
5295 outermost expression. */
5296 if (TREE_CODE (exp) == COMPONENT_REF)
5298 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5299 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5300 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5302 *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
5304 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5306 size_tree = TREE_OPERAND (exp, 1);
5307 *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
5311 mode = TYPE_MODE (TREE_TYPE (exp));
5312 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5314 if (mode == BLKmode)
5315 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5317 *pbitsize = GET_MODE_BITSIZE (mode);
5322 if (! host_integerp (size_tree, 1))
5323 mode = BLKmode, *pbitsize = -1;
5325 *pbitsize = tree_low_cst (size_tree, 1);
5328 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5329 and find the ultimate containing object. */
5332 switch (TREE_CODE (exp))
5335 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5336 TREE_OPERAND (exp, 2));
5341 tree field = TREE_OPERAND (exp, 1);
5342 tree this_offset = component_ref_field_offset (exp);
5344 /* If this field hasn't been filled in yet, don't go past it.
5345 This should only happen when folding expressions made during
5346 type construction. */
5347 if (this_offset == 0)
5350 offset = size_binop (PLUS_EXPR, offset, this_offset);
5351 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5352 DECL_FIELD_BIT_OFFSET (field));
5354 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5359 case ARRAY_RANGE_REF:
5361 tree index = TREE_OPERAND (exp, 1);
5362 tree low_bound = array_ref_low_bound (exp);
5363 tree unit_size = array_ref_element_size (exp);
5365 /* We assume all arrays have sizes that are a multiple of a byte.
5366 First subtract the lower bound, if any, in the type of the
5367 index, then convert to sizetype and multiply by the size of
5368 the array element. */
5369 if (! integer_zerop (low_bound))
5370 index = fold (build2 (MINUS_EXPR, TREE_TYPE (index),
5373 offset = size_binop (PLUS_EXPR, offset,
5374 size_binop (MULT_EXPR,
5375 convert (sizetype, index),
5384 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5385 bitsize_int (*pbitsize));
5388 case VIEW_CONVERT_EXPR:
5389 if (keep_aligning && STRICT_ALIGNMENT
5390 && (TYPE_ALIGN (TREE_TYPE (exp))
5391 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5392 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5393 < BIGGEST_ALIGNMENT)
5394 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5395 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5403 /* If any reference in the chain is volatile, the effect is volatile. */
5404 if (TREE_THIS_VOLATILE (exp))
5407 exp = TREE_OPERAND (exp, 0);
5411 /* If OFFSET is constant, see if we can return the whole thing as a
5412 constant bit position. Otherwise, split it up. */
5413 if (host_integerp (offset, 0)
5414 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5416 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5417 && host_integerp (tem, 0))
5418 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5420 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
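/* Illustrative sketch, not GCC source: what get_inner_reference computes
   for the reference "r.a[i].hi" below (assuming 4-byte int).  The
   ultimate containing object is R; the variable part of the position,
   i * sizeof (struct elem) bytes, comes back in *POFFSET, while the
   constant offset of field HI (32 bits) lands in *PBITPOS and the
   field's size (32 bits) in *PBITSIZE.  */
#if 0
struct elem { int lo, hi; };
struct rec { struct elem a[10]; } r;

int
example_inner_ref (int i)
{
  return r.a[i].hi;
}
#endif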
5426 /* Return a tree of sizetype representing the size, in bytes, of the element
5427 of EXP, an ARRAY_REF. */
5430 array_ref_element_size (tree exp)
5432 tree aligned_size = TREE_OPERAND (exp, 3);
5433 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5435 /* If a size was specified in the ARRAY_REF, it's the size measured
5436 in alignment units of the element type. So multiply by that value. */
5439 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5440 sizetype from another type of the same width and signedness. */
5441 if (TREE_TYPE (aligned_size) != sizetype)
5442 aligned_size = fold_convert (sizetype, aligned_size);
5443 return size_binop (MULT_EXPR, aligned_size,
5444 size_int (TYPE_ALIGN_UNIT (elmt_type)));
5447 /* Otherwise, take the size from that of the element type. Substitute
5448 any PLACEHOLDER_EXPR that we have. */
5450 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
5453 /* Return a tree representing the lower bound of the array mentioned in
5454 EXP, an ARRAY_REF. */
5457 array_ref_low_bound (tree exp)
5459 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5461 /* If a lower bound is specified in EXP, use it. */
5462 if (TREE_OPERAND (exp, 2))
5463 return TREE_OPERAND (exp, 2);
5465 /* Otherwise, if there is a domain type and it has a lower bound, use it,
5466 substituting for a PLACEHOLDER_EXPR as needed. */
5467 if (domain_type && TYPE_MIN_VALUE (domain_type))
5468 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
5470 /* Otherwise, return a zero of the appropriate type. */
5471 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
5474 /* Return a tree representing the upper bound of the array mentioned in
5475 EXP, an ARRAY_REF. */
5478 array_ref_up_bound (tree exp)
5480 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5482 /* If there is a domain type and it has an upper bound, use it, substituting
5483 for a PLACEHOLDER_EXPR as needed. */
5484 if (domain_type && TYPE_MAX_VALUE (domain_type))
5485 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
5487 /* Otherwise fail. */
5491 /* Return a tree representing the offset, in bytes, of the field referenced
5492 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
5495 component_ref_field_offset (tree exp)
5497 tree aligned_offset = TREE_OPERAND (exp, 2);
5498 tree field = TREE_OPERAND (exp, 1);
5500 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
5501 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that value. */
5505 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5506 sizetype from another type of the same width and signedness. */
5507 if (TREE_TYPE (aligned_offset) != sizetype)
5508 aligned_offset = fold_convert (sizetype, aligned_offset);
5509 return size_binop (MULT_EXPR, aligned_offset,
5510 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
5513 /* Otherwise, take the offset from that of the field. Substitute
5514 any PLACEHOLDER_EXPR that we have. */
5516 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
5519 /* Return 1 if T is an expression that get_inner_reference handles. */
5522 handled_component_p (tree t)
5524 switch (TREE_CODE (t))
5529 case ARRAY_RANGE_REF:
5530 case VIEW_CONVERT_EXPR:
5540 /* Given an rtx VALUE that may contain additions and multiplications, return
5541 an equivalent value that just refers to a register, memory, or constant.
5542 This is done by generating instructions to perform the arithmetic and
5543 returning a pseudo-register containing the value.
5545 The returned value may be a REG, SUBREG, MEM or constant. */
5548 force_operand (rtx value, rtx target)
5551 /* Use subtarget as the target for operand 0 of a binary operation. */
5552 rtx subtarget = get_subtarget (target);
5553 enum rtx_code code = GET_CODE (value);
5555 /* Check for a subreg applied to an expression produced by the loop optimizer. */
5557 && !REG_P (SUBREG_REG (value))
5558 && !MEM_P (SUBREG_REG (value)))
5560 value = simplify_gen_subreg (GET_MODE (value),
5561 force_reg (GET_MODE (SUBREG_REG (value)),
5562 force_operand (SUBREG_REG (value),
5564 GET_MODE (SUBREG_REG (value)),
5565 SUBREG_BYTE (value));
5566 code = GET_CODE (value);
5569 /* Check for a PIC address load. */
5570 if ((code == PLUS || code == MINUS)
5571 && XEXP (value, 0) == pic_offset_table_rtx
5572 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5573 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5574 || GET_CODE (XEXP (value, 1)) == CONST))
5577 subtarget = gen_reg_rtx (GET_MODE (value));
5578 emit_move_insn (subtarget, value);
5582 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5585 target = gen_reg_rtx (GET_MODE (value));
5586 convert_move (target, force_operand (XEXP (value, 0), NULL),
5587 code == ZERO_EXTEND);
5591 if (ARITHMETIC_P (value))
5593 op2 = XEXP (value, 1);
5594 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
5596 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5599 op2 = negate_rtx (GET_MODE (value), op2);
5602 /* Check for an addition with OP2 a constant integer and our first
5603 operand a PLUS of a virtual register and something else. In that
5604 case, we want to emit the sum of the virtual register and the
5605 constant first and then add the other value. This allows virtual
5606 register instantiation to simply modify the constant rather than
5607 creating another one around this addition. */
5608 if (code == PLUS && GET_CODE (op2) == CONST_INT
5609 && GET_CODE (XEXP (value, 0)) == PLUS
5610 && REG_P (XEXP (XEXP (value, 0), 0))
5611 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5612 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5614 rtx temp = expand_simple_binop (GET_MODE (value), code,
5615 XEXP (XEXP (value, 0), 0), op2,
5616 subtarget, 0, OPTAB_LIB_WIDEN);
5617 return expand_simple_binop (GET_MODE (value), code, temp,
5618 force_operand (XEXP (XEXP (value,
5620 target, 0, OPTAB_LIB_WIDEN);
5623 op1 = force_operand (XEXP (value, 0), subtarget);
5624 op2 = force_operand (op2, NULL_RTX);
5628 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5630 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5631 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5632 target, 1, OPTAB_LIB_WIDEN);
5634 return expand_divmod (0,
5635 FLOAT_MODE_P (GET_MODE (value))
5636 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5637 GET_MODE (value), op1, op2, target, 0);
5640 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5644 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5648 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5652 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5653 target, 0, OPTAB_LIB_WIDEN);
5656 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5657 target, 1, OPTAB_LIB_WIDEN);
5660 if (UNARY_P (value))
5662 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5663 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5666 #ifdef INSN_SCHEDULING
5667 /* On machines that have insn scheduling, we want all memory references to be
5668 explicit, so we need to deal with such paradoxical SUBREGs. */
5669 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
5670 && (GET_MODE_SIZE (GET_MODE (value))
5671 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5673 = simplify_gen_subreg (GET_MODE (value),
5674 force_reg (GET_MODE (SUBREG_REG (value)),
5675 force_operand (SUBREG_REG (value),
5677 GET_MODE (SUBREG_REG (value)),
5678 SUBREG_BYTE (value));
5684 /* Subroutine of expand_expr: return nonzero iff there is no way that
5685 EXP can reference X, which is being modified. TOP_P is nonzero if this
5686 call is going to be used to determine whether we need a temporary
5687 for EXP, as opposed to a recursive call to this function.
5689 It is always safe for this routine to return zero since it merely
5690 searches for optimization opportunities. */
5693 safe_from_p (rtx x, tree exp, int top_p)
5699 /* If EXP has varying size, we MUST use a target since we currently
5700 have no way of allocating temporaries of variable size
5701 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5702 So we assume here that something at a higher level has prevented a
5703 clash. This is somewhat bogus, but the best we can do. Only
5704 do this when X is BLKmode and when we are at the top level. */
5705 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5706 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5707 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5708 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5709 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5711 && GET_MODE (x) == BLKmode)
5712 /* If X is in the outgoing argument area, it is always safe. */
5714 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5715 || (GET_CODE (XEXP (x, 0)) == PLUS
5716 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5719 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5720 find the underlying pseudo. */
5721 if (GET_CODE (x) == SUBREG)
5724 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
5728 /* Now look at our tree code and possibly recurse. */
5729 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5731 case tcc_declaration:
5732 exp_rtl = DECL_RTL_IF_SET (exp);
5738 case tcc_exceptional:
5739 if (TREE_CODE (exp) == TREE_LIST)
5743 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
5745 exp = TREE_CHAIN (exp);
5748 if (TREE_CODE (exp) != TREE_LIST)
5749 return safe_from_p (x, exp, 0);
5752 else if (TREE_CODE (exp) == ERROR_MARK)
5753 return 1; /* An already-visited SAVE_EXPR? */
5758 /* The only case we look at here is the DECL_INITIAL inside a DECL_EXPR. */
5760 return (TREE_CODE (exp) != DECL_EXPR
5761 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
5762 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
5763 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
5766 case tcc_comparison:
5767 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
5772 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5774 case tcc_expression:
5776 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5777 the expression. If it is set, we conflict iff we are that rtx or
5778 both are in memory. Otherwise, we check all operands of the
5779 expression recursively. */
5781 switch (TREE_CODE (exp))
5784 /* If the operand is static or we are static, we can't conflict.
5785 Likewise if we don't conflict with the operand at all. */
5786 if (staticp (TREE_OPERAND (exp, 0))
5787 || TREE_STATIC (exp)
5788 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5791 /* Otherwise, the only way this can conflict is if we are taking the
5792 address of a DECL whose address is part of X, which is very rare. */
5794 exp = TREE_OPERAND (exp, 0);
5797 if (!DECL_RTL_SET_P (exp)
5798 || !MEM_P (DECL_RTL (exp)))
5801 exp_rtl = XEXP (DECL_RTL (exp), 0);
5805 case MISALIGNED_INDIRECT_REF:
5806 case ALIGN_INDIRECT_REF:
5809 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5810 get_alias_set (exp)))
5815 /* Assume that the call will clobber all hard registers and all of memory. */
5817 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
5822 case WITH_CLEANUP_EXPR:
5823 case CLEANUP_POINT_EXPR:
5824 /* Lowered by gimplify.c. */
5828 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5834 /* If we have an rtx, we do not need to scan our operands. */
5838 nops = TREE_CODE_LENGTH (TREE_CODE (exp));
5839 for (i = 0; i < nops; i++)
5840 if (TREE_OPERAND (exp, i) != 0
5841 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5844 /* If this is a language-specific tree code, it may require
5845 special handling. */
5846 if ((unsigned int) TREE_CODE (exp)
5847 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5848 && !lang_hooks.safe_from_p (x, exp))
5853 /* Should never get a type here. */
5857 /* If we have an rtl, find any enclosed object. Then see if we conflict with it. */
5861 if (GET_CODE (exp_rtl) == SUBREG)
5863 exp_rtl = SUBREG_REG (exp_rtl);
5865 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5869 /* If the rtl is X, then it is not safe. Otherwise, it is safe unless both
5870 are memory and they conflict. */
5871 return ! (rtx_equal_p (x, exp_rtl)
5872 || (MEM_P (x) && MEM_P (exp_rtl)
5873 && true_dependence (exp_rtl, VOIDmode, x,
5874 rtx_addr_varies_p)));
5877 /* If we reach here, it is safe. */
5882 /* Return the highest power of two that EXP is known to be a multiple of.
5883 This is used in updating alignment of MEMs in array references. */
5885 static unsigned HOST_WIDE_INT
5886 highest_pow2_factor (tree exp)
5888 unsigned HOST_WIDE_INT c0, c1;
5890 switch (TREE_CODE (exp))
5893 /* We can find the lowest bit that's a one. If the low
5894 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
5895 We need to handle this case since we can find it in a COND_EXPR,
5896 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
5897 erroneous program, so return BIGGEST_ALIGNMENT to avoid any later ICE. */
5899 if (TREE_CONSTANT_OVERFLOW (exp))
5900 return BIGGEST_ALIGNMENT;
5903 /* Note: tree_low_cst is intentionally not used here;
5904 we don't care about the upper bits. */
5905 c0 = TREE_INT_CST_LOW (exp);
5906 c0 &= -c0; /* isolate the lowest set bit */
5907 return c0 ? c0 : BIGGEST_ALIGNMENT;
5911 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
5912 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5913 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5914 return MIN (c0, c1);
5917 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5918 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5919 return c0 * c1;
5921 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
5923 if (integer_pow2p (TREE_OPERAND (exp, 1))
5924 && host_integerp (TREE_OPERAND (exp, 1), 1))
5926 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5927 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
5928 return MAX (1, c0 / c1);
5932 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
5934 return highest_pow2_factor (TREE_OPERAND (exp, 0));
5937 return highest_pow2_factor (TREE_OPERAND (exp, 1));
5940 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5941 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
5942 return MIN (c0, c1);
5951 /* Similar, except that the alignment requirements of TARGET are
5952 taken into account. Assume it is at least as aligned as its
5953 type, unless it is a COMPONENT_REF in which case the layout of
5954 the structure gives the alignment. */
5956 static unsigned HOST_WIDE_INT
5957 highest_pow2_factor_for_target (tree target, tree exp)
5959 unsigned HOST_WIDE_INT target_align, factor;
5961 factor = highest_pow2_factor (exp);
5962 if (TREE_CODE (target) == COMPONENT_REF)
5963 target_align = DECL_ALIGN_UNIT (TREE_OPERAND (target, 1));
5965 target_align = TYPE_ALIGN_UNIT (TREE_TYPE (target));
5966 return MAX (factor, target_align);
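/* Worked example, a sketch: for the byte-offset expression "i * 12 + 8",
   highest_pow2_factor computes
     MULT_EXPR: 1 (I is unknown, the default case) * 4 (lowest set bit
     of 12) = 4;
     PLUS_EXPR: MIN (4, 8) = 4;
   so a MEM addressed at that offset may be assumed 4-byte aligned.  */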
5969 /* Expands variable VAR. */
5972 expand_var (tree var)
5974 if (DECL_EXTERNAL (var))
5977 if (TREE_STATIC (var))
5978 /* If this is an inlined copy of a static local variable,
5979 look up the original decl. */
5980 var = DECL_ORIGIN (var);
5982 if (TREE_STATIC (var)
5983 ? !TREE_ASM_WRITTEN (var)
5984 : !DECL_RTL_SET_P (var))
5986 if (TREE_CODE (var) == VAR_DECL && DECL_VALUE_EXPR (var))
5987 /* Should be ignored. */;
5988 else if (lang_hooks.expand_decl (var))
5990 else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
5992 else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
5993 rest_of_decl_compilation (var, 0, 0);
5995 /* No expansion needed. */
5996 gcc_assert (TREE_CODE (var) == TYPE_DECL
5997 || TREE_CODE (var) == CONST_DECL
5998 || TREE_CODE (var) == FUNCTION_DECL
5999 || TREE_CODE (var) == LABEL_DECL);
6003 /* Subroutine of expand_expr. Expand the two operands of a binary
6004 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6005 The value may be stored in TARGET if TARGET is nonzero. The
6006 MODIFIER argument is as documented by expand_expr. */
6009 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6010 enum expand_modifier modifier)
6012 if (! safe_from_p (target, exp1, 1))
6014 if (operand_equal_p (exp0, exp1, 0))
6016 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6017 *op1 = copy_rtx (*op0);
6021 /* If we need to preserve evaluation order, copy exp0 into its own
6022 temporary variable so that it can't be clobbered by exp1. */
6023 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6024 exp0 = save_expr (exp0);
6025 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6026 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6031 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
6032 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6035 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
6036 enum expand_modifier modifier)
6038 rtx result, subtarget;
6040 HOST_WIDE_INT bitsize, bitpos;
6041 int volatilep, unsignedp;
6042 enum machine_mode mode1;
6044 /* If we are taking the address of a constant and are at the top level,
6045 we have to use output_constant_def since we can't call force_const_mem yet. */
6047 /* ??? This should be considered a front-end bug. We should not be
6048 generating ADDR_EXPR of something that isn't an LVALUE. The only
6049 exception here is STRING_CST. */
6050 if (TREE_CODE (exp) == CONSTRUCTOR
6051 || CONSTANT_CLASS_P (exp))
6052 return XEXP (output_constant_def (exp, 0), 0);
6054 /* Everything must be something allowed by is_gimple_addressable. */
6055 switch (TREE_CODE (exp))
6058 /* This case will happen via recursion for &a->b. */
6059 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, EXPAND_NORMAL);
6062 /* Recurse and make the output_constant_def clause above handle this. */
6063 return expand_expr_addr_expr_1 (DECL_INITIAL (exp), target,
6067 /* The real part of the complex number is always first, therefore
6068 the address is the same as the address of the parent object. */
6071 inner = TREE_OPERAND (exp, 0);
6075 /* The imaginary part of the complex number is always second.
6076 The expression is therefore always offset by the size of the scalar type. */
6079 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
6080 inner = TREE_OPERAND (exp, 0);
6084 /* If the object is a DECL, then expand it for its rtl. Don't bypass
6085 expand_expr, as that can have various side effects; LABEL_DECLs, for
6086 example, may not have their DECL_RTL set yet. Assume language
6087 specific tree nodes can be expanded in some interesting way. */
6089 || TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE)
6091 result = expand_expr (exp, target, tmode,
6092 modifier == EXPAND_INITIALIZER
6093 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
6095 /* If the DECL isn't in memory, then the DECL wasn't properly
6096 marked TREE_ADDRESSABLE, which will be either a front-end
6097 or a tree optimizer bug. */
6098 gcc_assert (GET_CODE (result) == MEM);
6099 result = XEXP (result, 0);
6101 /* ??? Is this needed anymore? */
6102 if (DECL_P (exp) && !TREE_USED (exp) == 0)
6104 assemble_external (exp);
6105 TREE_USED (exp) = 1;
6108 if (modifier != EXPAND_INITIALIZER
6109 && modifier != EXPAND_CONST_ADDRESS)
6110 result = force_operand (result, target);
6114 /* Pass FALSE as the last argument to get_inner_reference although
6115 we are expanding to RTL. The rationale is that we know how to
6116 handle "aligning nodes" here: we can just bypass them because
6117 they won't change the final object whose address will be returned
6118 (they actually exist only for that purpose). */
6119 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6120 &mode1, &unsignedp, &volatilep, false);
6124 /* We must have made progress. */
6125 gcc_assert (inner != exp);
6127 subtarget = offset || bitpos ? NULL_RTX : target;
6128 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier);
6134 if (modifier != EXPAND_NORMAL)
6135 result = force_operand (result, NULL);
6136 tmp = expand_expr (offset, NULL, tmode, EXPAND_NORMAL);
6138 result = convert_memory_address (tmode, result);
6139 tmp = convert_memory_address (tmode, tmp);
6141 if (modifier == EXPAND_SUM)
6142 result = gen_rtx_PLUS (tmode, result, tmp);
6145 subtarget = bitpos ? NULL_RTX : target;
6146 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
6147 1, OPTAB_LIB_WIDEN);
6153 /* Someone beforehand should have rejected taking the address
6154 of such an object. */
6155 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
6157 result = plus_constant (result, bitpos / BITS_PER_UNIT);
6158 if (modifier < EXPAND_SUM)
6159 result = force_operand (result, target);
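/* Illustrative sketch, not GCC source: the decomposition above applied
   to "&v.a[i]" (assuming 4-byte int).  get_inner_reference splits the
   reference into the base V, a variable offset of i * 4 bytes, and a
   constant bitpos of 32 bits for field A; the recursive call yields the
   address of V, the offset is added with expand_simple_binop, and
   bitpos / BITS_PER_UNIT is folded in with plus_constant.  */
#if 0
struct wrap { int pad; int a[16]; } v;

int *
example_addr (int i)
{
  return &v.a[i];
}
#endif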
6165 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
6166 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6169 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
6170 enum expand_modifier modifier)
6172 enum machine_mode rmode;
6175 /* Target mode of VOIDmode says "whatever's natural". */
6176 if (tmode == VOIDmode)
6177 tmode = TYPE_MODE (TREE_TYPE (exp));
6179 /* We can get called with some Weird Things if the user does silliness
6180 like "(short) &a". In that case, convert_memory_address won't do
6181 the right thing, so ignore the given target mode. */
6182 if (tmode != Pmode && tmode != ptr_mode)
6185 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
6188 /* Despite expand_expr claims concerning ignoring TMODE when not
6189 strictly convenient, stuff breaks if we don't honor it. Note
6190 that combined with the above, we only do this for pointer modes. */
6191 rmode = GET_MODE (result);
6192 if (rmode == VOIDmode)
6195 result = convert_memory_address (tmode, result);
6201 /* expand_expr: generate code for computing expression EXP.
6202 An rtx for the computed value is returned. The value is never null.
6203 In the case of a void EXP, const0_rtx is returned.
6205 The value may be stored in TARGET if TARGET is nonzero.
6206 TARGET is just a suggestion; callers must assume that
6207 the rtx returned may not be the same as TARGET.
6209 If TARGET is CONST0_RTX, it means that the value will be ignored.
6211 If TMODE is not VOIDmode, it suggests generating the
6212 result in mode TMODE. But this is done only when convenient.
6213 Otherwise, TMODE is ignored and the value generated in its natural mode.
6214 TMODE is just a suggestion; callers must assume that
6215 the rtx returned may not have mode TMODE.
6217 Note that TARGET may have neither TMODE nor MODE. In that case, it
6218 probably will not be used.
6220 If MODIFIER is EXPAND_SUM then when EXP is an addition
6221 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6222 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6223 products as above, or REG or MEM, or constant.
6224 Ordinarily in such cases we would output mul or add instructions
6225 and then return a pseudo reg containing the sum.
6227 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6228 it also marks a label as absolutely required (it can't be dead).
6229 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6230 This is used for outputting expressions used in initializers.
6232 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6233 with a constant address even if that address is not normally legitimate.
6234 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6236 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6237 a call parameter. Such targets require special care as we haven't yet
6238 marked TARGET so that it's safe from being trashed by libcalls. We
6239 don't want to use TARGET for anything but the final result;
6240 intermediate values must go elsewhere. Additionally, calls to
6241 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6243 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6244 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6245 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
6246 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on recursively. */
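/* Illustration (a sketch, not a call site in this file): a caller
   building an address could do

	rtx addr = expand_expr (exp, NULL_RTX, Pmode, EXPAND_SUM);
	addr = memory_address (mode, addr);

   and let memory_address legitimize whatever (PLUS ...) form the
   EXPAND_SUM expansion returned.  */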
6249 static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
6250 enum expand_modifier, rtx *);
6253 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6254 enum expand_modifier modifier, rtx *alt_rtl)
6257 rtx ret, last = NULL;
6259 /* Handle ERROR_MARK before anybody tries to access its type. */
6260 if (TREE_CODE (exp) == ERROR_MARK
6261 || TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK)
6263 ret = CONST0_RTX (tmode);
6264 return ret ? ret : const0_rtx;
6267 if (flag_non_call_exceptions)
6269 rn = lookup_stmt_eh_region (exp);
6270 /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw. */
6272 last = get_last_insn ();
6275 /* If this is an expression of some kind and it has an associated line
6276 number, then emit the line number before expanding the expression.
6278 We need to save and restore the file and line information so that
6279 errors discovered during expansion are emitted with the right
6280 information. It would be better if the diagnostic routines
6281 used the file/line information embedded in the tree nodes rather than globals. */
6283 if (cfun && EXPR_HAS_LOCATION (exp))
6285 location_t saved_location = input_location;
6286 input_location = EXPR_LOCATION (exp);
6287 emit_line_note (input_location);
6289 /* Record where the insns produced belong. */
6290 record_block_change (TREE_BLOCK (exp));
6292 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6294 input_location = saved_location;
6298 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6301 /* If using non-call exceptions, mark all insns that may trap.
6302 expand_call() will mark CALL_INSNs before we get to this code,
6303 but it doesn't handle libcalls, and these may trap. */
6307 for (insn = next_real_insn (last); insn;
6308 insn = next_real_insn (insn))
6310 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
6311 /* If we want exceptions for non-call insns, any
6312 may_trap_p instruction may throw. */
6313 && GET_CODE (PATTERN (insn)) != CLOBBER
6314 && GET_CODE (PATTERN (insn)) != USE
6315 && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
6317 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
6327 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
6328 enum expand_modifier modifier, rtx *alt_rtl)
6331 tree type = TREE_TYPE (exp);
6333 enum machine_mode mode;
6334 enum tree_code code = TREE_CODE (exp);
6336 rtx subtarget, original_target;
6339 bool reduce_bit_field = false;
6340 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field && !ignore \
6341 ? reduce_to_bit_field_precision ((expr), target, type) : (expr))
6346 mode = TYPE_MODE (type);
6347 unsignedp = TYPE_UNSIGNED (type);
6348 if (lang_hooks.reduce_bit_field_operations
6349 && TREE_CODE (type) == INTEGER_TYPE
6350 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type))
6352 /* An operation in what may be a bit-field type needs the
6353 result to be reduced to the precision of the bit-field type,
6354 which is narrower than that of the type's mode. */
6355 reduce_bit_field = true;
6356 if (modifier == EXPAND_STACK_PARM)
6360 /* Use subtarget as the target for operand 0 of a binary operation. */
6361 subtarget = get_subtarget (target);
6362 original_target = target;
6363 ignore = (target == const0_rtx
6364 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6365 || code == CONVERT_EXPR || code == COND_EXPR
6366 || code == VIEW_CONVERT_EXPR)
6367 && TREE_CODE (type) == VOID_TYPE));
6369 /* If we are going to ignore this result, we need only do something
6370 if there is a side-effect somewhere in the expression. If there
6371 is, short-circuit the most common cases here. Note that we must
6372 not call expand_expr with anything but const0_rtx in case this
6373 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
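/* For instance (illustrative): expanding "(void) (x + 1)" with the
   result ignored emits no code when the operands have no side effects,
   whereas "(void) (f () + 1)" still expands the call to f below.  */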
6377 if (! TREE_SIDE_EFFECTS (exp))
6380 /* Ensure we reference a volatile object even if value is ignored, but
6381 don't do this if all we are doing is taking its address. */
6382 if (TREE_THIS_VOLATILE (exp)
6383 && TREE_CODE (exp) != FUNCTION_DECL
6384 && mode != VOIDmode && mode != BLKmode
6385 && modifier != EXPAND_CONST_ADDRESS)
6387 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6389 temp = copy_to_reg (temp);
6393 if (TREE_CODE_CLASS (code) == tcc_unary
6394 || code == COMPONENT_REF || code == INDIRECT_REF)
6395 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6398 else if (TREE_CODE_CLASS (code) == tcc_binary
6399 || TREE_CODE_CLASS (code) == tcc_comparison
6400 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6402 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6403 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6406 else if (code == BIT_FIELD_REF)
6408 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6409 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6410 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6417 /* If we will do cse, generate all results into pseudo registers
6418 since 1) that allows cse to find more things
6419 and 2) otherwise cse could produce an insn the machine
6420 cannot support. An exception is a CONSTRUCTOR into a multi-word
6421 MEM: that's much more likely to be most efficient into the MEM.
6422 Another is a CALL_EXPR which must return in memory. */
6424 if (! cse_not_expected && mode != BLKmode && target
6425 && (!REG_P (target) || REGNO (target) < FIRST_PSEUDO_REGISTER)
6426 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
6427 && ! (code == CALL_EXPR && aggregate_value_p (exp, exp)))
6434 tree function = decl_function_context (exp);
6436 temp = label_rtx (exp);
6437 temp = gen_rtx_LABEL_REF (Pmode, temp);
6439 if (function != current_function_decl
6441 LABEL_REF_NONLOCAL_P (temp) = 1;
6443 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
6448 return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
6453 /* If a static var's type was incomplete when the decl was written,
6454 but the type is complete now, lay out the decl now. */
6455 if (DECL_SIZE (exp) == 0
6456 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6457 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6458 layout_decl (exp, 0);
6460 /* ... fall through ... */
6464 gcc_assert (DECL_RTL (exp));
6466 /* Ensure the variable is marked as used even if it doesn't go through
6467 a parser. If it hasn't been used yet, write out an external definition. */
6469 if (! TREE_USED (exp))
6471 assemble_external (exp);
6472 TREE_USED (exp) = 1;
6475 /* Show we haven't gotten RTL for this yet. */
6478 /* Variables inherited from containing functions should have
6479 been lowered by this point. */
6480 context = decl_function_context (exp);
6481 gcc_assert (!context
6482 || context == current_function_decl
6483 || TREE_STATIC (exp)
6484 /* ??? C++ creates functions that are not TREE_STATIC. */
6485 || TREE_CODE (exp) == FUNCTION_DECL);
6487 /* This is the case of an array whose size is to be determined
6488 from its initializer, while the initializer is still being parsed.
6491 if (MEM_P (DECL_RTL (exp))
6492 && REG_P (XEXP (DECL_RTL (exp), 0)))
6493 temp = validize_mem (DECL_RTL (exp));
6495 /* If DECL_RTL is memory, we are in the normal case and either
6496 the address is not valid or it is not a register and -fforce-addr
6497 is specified, get the address into a register. */
6499 else if (MEM_P (DECL_RTL (exp))
6500 && modifier != EXPAND_CONST_ADDRESS
6501 && modifier != EXPAND_SUM
6502 && modifier != EXPAND_INITIALIZER
6503 && (! memory_address_p (DECL_MODE (exp),
6504 XEXP (DECL_RTL (exp), 0))
6506 && !REG_P (XEXP (DECL_RTL (exp), 0)))))
6509 *alt_rtl = DECL_RTL (exp);
6510 temp = replace_equiv_address (DECL_RTL (exp),
6511 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6514 /* If we got something, return it. But first, set the alignment
6515 if the address is a register. */
6518 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
6519 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6524 /* If the mode of DECL_RTL does not match that of the decl, it
6525 must be a promoted value. We return a SUBREG of the wanted mode,
6526 but mark it so that we know that it was already extended. */
6528 if (REG_P (DECL_RTL (exp))
6529 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6531 enum machine_mode pmode;
6533 /* Get the signedness used for this variable. Ensure we get the
6534 same mode we got when the variable was declared. */
6535 pmode = promote_mode (type, DECL_MODE (exp), &unsignedp,
6536 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0));
6537 gcc_assert (GET_MODE (DECL_RTL (exp)) == pmode);
6539 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6540 SUBREG_PROMOTED_VAR_P (temp) = 1;
6541 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6545 return DECL_RTL (exp);
6548 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6549 TREE_INT_CST_HIGH (exp), mode);
6551 /* ??? If overflow is set, fold will have done an incomplete job,
6552 which can result in (plus xx (const_int 0)), which can get
6553 simplified by validate_replace_rtx during virtual register
6554 instantiation, which can result in unrecognizable insns.
6555 Avoid this by forcing all overflows into registers. */
6556 if (TREE_CONSTANT_OVERFLOW (exp)
6557 && modifier != EXPAND_INITIALIZER)
6558 temp = force_reg (mode, temp);
6563 if (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_INT
6564 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_FLOAT)
6565 return const_vector_from_tree (exp);
6567 return expand_expr (build1 (CONSTRUCTOR, TREE_TYPE (exp),
6568 TREE_VECTOR_CST_ELTS (exp)),
6569 ignore ? const0_rtx : target, tmode, modifier);
6572 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6575 /* If optimized, generate immediate CONST_DOUBLE
6576 which will be turned into memory by reload if necessary.
6578 We used to force a register so that loop.c could see it. But
6579 this does not allow gen_* patterns to perform optimizations with
6580 the constants. It also produces two insns in cases like "x = 1.0;".
6581 On most machines, floating-point constants are not permitted in
6582 many insns, so we'd end up copying it to a register in any case.
6584 Now, we do the copying in expand_binop, if appropriate. */
6585 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6586 TYPE_MODE (TREE_TYPE (exp)));
6589 /* Handle evaluating a complex constant in a CONCAT target. */
6590 if (original_target && GET_CODE (original_target) == CONCAT)
6592 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6595 rtarg = XEXP (original_target, 0);
6596 itarg = XEXP (original_target, 1);
6598 /* Move the real and imaginary parts separately. */
6599 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
6600 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
6603 emit_move_insn (rtarg, op0);
6605 emit_move_insn (itarg, op1);
6607 return original_target;
6610 /* ... fall through ... */
6613 temp = output_constant_def (exp, 1);
6615 /* temp contains a constant address.
6616 On RISC machines where a constant address isn't valid,
6617 make some insns to get that address into a register. */
6618 if (modifier != EXPAND_CONST_ADDRESS
6619 && modifier != EXPAND_INITIALIZER
6620 && modifier != EXPAND_SUM
6621 && (! memory_address_p (mode, XEXP (temp, 0))
6622 || flag_force_addr))
6623 return replace_equiv_address (temp,
6624 copy_rtx (XEXP (temp, 0)));
6629 tree val = TREE_OPERAND (exp, 0);
6630 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
6632 if (!SAVE_EXPR_RESOLVED_P (exp))
6634 /* We can indeed still hit this case, typically via builtin
6635 expanders calling save_expr immediately before expanding
6636 something. Assume this means that we only have to deal
6637 with non-BLKmode values. */
6638 gcc_assert (GET_MODE (ret) != BLKmode);
6640 val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
6641 DECL_ARTIFICIAL (val) = 1;
6642 DECL_IGNORED_P (val) = 1;
6643 TREE_OPERAND (exp, 0) = val;
6644 SAVE_EXPR_RESOLVED_P (exp) = 1;
6646 if (!CONSTANT_P (ret))
6647 ret = copy_to_reg (ret);
6648 SET_DECL_RTL (val, ret);
6655 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6656 expand_goto (TREE_OPERAND (exp, 0));
6658 expand_computed_goto (TREE_OPERAND (exp, 0));
6662 /* If we don't need the result, just ensure we evaluate any subexpressions. */
6668 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6669 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6674 /* All elts simple constants => refer to a constant in memory. But
6675 if this is a non-BLKmode mode, let it store a field at a time
6676 since that should make a CONST_INT or CONST_DOUBLE when we
6677 fold. Likewise, if we have a target we can use, it is best to
6678 store directly into the target unless the type is large enough
6679 that memcpy will be used. If we are making an initializer and
6680 all operands are constant, put it in memory as well.
6682 FIXME: Avoid trying to fill vector constructors piece-meal.
6683 Output them with output_constant_def below unless we're sure
6684 they're zeros. This should go away when vector initializers
6685 are treated like VECTOR_CST instead of arrays.
6687 else if ((TREE_STATIC (exp)
6688 && ((mode == BLKmode
6689 && ! (target != 0 && safe_from_p (target, exp, 1)))
6690 || TREE_ADDRESSABLE (exp)
6691 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6692 && (! MOVE_BY_PIECES_P
6693 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6695 && ! mostly_zeros_p (exp))))
6696 || ((modifier == EXPAND_INITIALIZER
6697 || modifier == EXPAND_CONST_ADDRESS)
6698 && TREE_CONSTANT (exp)))
6700 rtx constructor = output_constant_def (exp, 1);
6702 if (modifier != EXPAND_CONST_ADDRESS
6703 && modifier != EXPAND_INITIALIZER
6704 && modifier != EXPAND_SUM)
6705 constructor = validize_mem (constructor);
6711 /* Handle calls that pass values in multiple non-contiguous
6712 locations. The Irix 6 ABI has examples of this. */
6713 if (target == 0 || ! safe_from_p (target, exp, 1)
6714 || GET_CODE (target) == PARALLEL
6715 || modifier == EXPAND_STACK_PARM)
6717 = assign_temp (build_qualified_type (type,
6719 | (TREE_READONLY (exp)
6720 * TYPE_QUAL_CONST))),
6721 0, TREE_ADDRESSABLE (exp), 1);
6723 store_constructor (exp, target, 0, int_expr_size (exp));
6727 case MISALIGNED_INDIRECT_REF:
6728 case ALIGN_INDIRECT_REF:
6731 tree exp1 = TREE_OPERAND (exp, 0);
6734 if (modifier != EXPAND_WRITE)
6738 t = fold_read_from_constant_string (exp);
6740 return expand_expr (t, target, tmode, modifier);
6743 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6744 op0 = memory_address (mode, op0);
6746 if (code == ALIGN_INDIRECT_REF)
6748 int align = TYPE_ALIGN_UNIT (type);
6749 op0 = gen_rtx_AND (Pmode, op0, GEN_INT (-align));
6750 op0 = memory_address (mode, op0);
6753 temp = gen_rtx_MEM (mode, op0);
6755 orig = REF_ORIGINAL (exp);
6758 set_mem_attributes (temp, orig, 0);
6760 /* Resolve the misalignment now, so that we don't have to remember
6761 to resolve it later. Of course, this only works for reads. */
6762 /* ??? When we get around to supporting writes, we'll have to handle
6763 this in store_expr directly. The vectorizer isn't generating
6764 those yet, however. */
6765 if (code == MISALIGNED_INDIRECT_REF)
6770 gcc_assert (modifier == EXPAND_NORMAL);
6772 /* The vectorizer should have already checked the mode. */
6773 icode = movmisalign_optab->handlers[mode].insn_code;
6774 gcc_assert (icode != CODE_FOR_nothing);
6776 /* We've already validated the memory, and we're creating a
6777 new pseudo destination. The predicates really can't fail. */
6778 reg = gen_reg_rtx (mode);
6780 /* Nor can the insn generator. */
6781 insn = GEN_FCN (icode) (reg, temp);
6793 tree array = TREE_OPERAND (exp, 0);
6794 tree index = TREE_OPERAND (exp, 1);
6796 /* Fold an expression like: "foo"[2].
6797 This is not done in fold so it won't happen inside &.
6798 Don't fold if this is for wide characters since it's too
6799 difficult to do correctly and this is a very rare case. */
6801 if (modifier != EXPAND_CONST_ADDRESS
6802 && modifier != EXPAND_INITIALIZER
6803 && modifier != EXPAND_MEMORY)
6805 tree t = fold_read_from_constant_string (exp);
6808 return expand_expr (t, target, tmode, modifier);
6811 /* If this is a constant index into a constant array,
6812 just get the value from the array. Handle both the cases when
6813 we have an explicit constructor and when our operand is a variable
6814 that was declared const. */
6816 if (modifier != EXPAND_CONST_ADDRESS
6817 && modifier != EXPAND_INITIALIZER
6818 && modifier != EXPAND_MEMORY
6819 && TREE_CODE (array) == CONSTRUCTOR
6820 && ! TREE_SIDE_EFFECTS (array)
6821 && TREE_CODE (index) == INTEGER_CST)
6825 for (elem = CONSTRUCTOR_ELTS (array);
6826 (elem && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6827 elem = TREE_CHAIN (elem))
6830 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6831 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
6835 else if (optimize >= 1
6836 && modifier != EXPAND_CONST_ADDRESS
6837 && modifier != EXPAND_INITIALIZER
6838 && modifier != EXPAND_MEMORY
6839 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6840 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6841 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
6842 && targetm.binds_local_p (array))
6844 if (TREE_CODE (index) == INTEGER_CST)
6846 tree init = DECL_INITIAL (array);
6848 if (TREE_CODE (init) == CONSTRUCTOR)
6852 for (elem = CONSTRUCTOR_ELTS (init);
6854 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6855 elem = TREE_CHAIN (elem))
6858 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6859 return expand_expr (fold (TREE_VALUE (elem)), target,
6862 else if (TREE_CODE (init) == STRING_CST
6863 && 0 > compare_tree_int (index,
6864 TREE_STRING_LENGTH (init)))
6866 tree type = TREE_TYPE (TREE_TYPE (init));
6867 enum machine_mode mode = TYPE_MODE (type);
6869 if (GET_MODE_CLASS (mode) == MODE_INT
6870 && GET_MODE_SIZE (mode) == 1)
6871 return gen_int_mode (TREE_STRING_POINTER (init)
6872 [TREE_INT_CST_LOW (index)], mode);
6877 goto normal_inner_ref;
6880 /* If the operand is a CONSTRUCTOR, we can just extract the
6881 appropriate field if it is present. */
6882 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
6886 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6887 elt = TREE_CHAIN (elt))
6888 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6889 /* We can normally use the value of the field in the
6890 CONSTRUCTOR. However, if this is a bitfield in
6891 an integral mode that we can fit in a HOST_WIDE_INT,
6892 we must mask only the number of bits in the bitfield,
6893 since this is done implicitly by the constructor. If
6894 the bitfield does not meet either of those conditions,
6895 we can't do this optimization. */
6896 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6897 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6899 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6900 <= HOST_BITS_PER_WIDE_INT))))
6902 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
6903 && modifier == EXPAND_STACK_PARM)
6905 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6906 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6908 HOST_WIDE_INT bitsize
6909 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6910 enum machine_mode imode
6911 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6913 if (TYPE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6915 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6916 op0 = expand_and (imode, op0, op1, target);
6921 = build_int_cst (NULL_TREE,
6922 GET_MODE_BITSIZE (imode) - bitsize);
6924 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6926 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6934 goto normal_inner_ref;
6937 case ARRAY_RANGE_REF:
6940 enum machine_mode mode1;
6941 HOST_WIDE_INT bitsize, bitpos;
6944 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6945 &mode1, &unsignedp, &volatilep, true);
6948 /* If we got back the original object, something is wrong. Perhaps
6949 we are evaluating an expression too early. In any event, don't
6950 infinitely recurse. */
6951 gcc_assert (tem != exp);
6953 /* If TEM's type is a union of variable size, pass TARGET to the inner
6954 computation, since it will need a temporary and TARGET is known
6955 to be able to serve as one. This occurs in unchecked conversion in Ada. */
6959 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6960 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6962 && modifier != EXPAND_STACK_PARM
6963 ? target : NULL_RTX),
6965 (modifier == EXPAND_INITIALIZER
6966 || modifier == EXPAND_CONST_ADDRESS
6967 || modifier == EXPAND_STACK_PARM)
6968 ? modifier : EXPAND_NORMAL);
6970 /* If this is a constant, put it into a register if it is a
6971 legitimate constant and OFFSET is 0; otherwise put it in memory. */
6972 if (CONSTANT_P (op0))
6974 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6975 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6977 op0 = force_reg (mode, op0);
6979 op0 = validize_mem (force_const_mem (mode, op0));
6982 /* Otherwise, if this object is not in memory and we either have an
6983 offset or a BLKmode result, put it there. This case can't occur in
6984 C, but can in Ada if we have unchecked conversion of an expression
6985 from a scalar type to an array or record type or for an
6986 ARRAY_RANGE_REF whose type is BLKmode. */
6987 else if (!MEM_P (op0)
6989 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
6991 tree nt = build_qualified_type (TREE_TYPE (tem),
6992 (TYPE_QUALS (TREE_TYPE (tem))
6993 | TYPE_QUAL_CONST));
6994 rtx memloc = assign_temp (nt, 1, 1, 1);
6996 emit_move_insn (memloc, op0);
7002 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7005 gcc_assert (MEM_P (op0));
7007 #ifdef POINTERS_EXTEND_UNSIGNED
7008 if (GET_MODE (offset_rtx) != Pmode)
7009 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7011 if (GET_MODE (offset_rtx) != ptr_mode)
7012 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7015 if (GET_MODE (op0) == BLKmode
7016 /* A constant address in OP0 can have VOIDmode, we must
7017 not try to call force_reg in that case. */
7018 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7020 && (bitpos % bitsize) == 0
7021 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7022 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7024 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7028 op0 = offset_address (op0, offset_rtx,
7029 highest_pow2_factor (offset));
7032 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7033 record its alignment as BIGGEST_ALIGNMENT. */
7034 if (MEM_P (op0) && bitpos == 0 && offset != 0
7035 && is_aligning_offset (offset, tem))
7036 set_mem_align (op0, BIGGEST_ALIGNMENT);
7038 /* Don't forget about volatility even if this is a bitfield. */
7039 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
7041 if (op0 == orig_op0)
7042 op0 = copy_rtx (op0);
7044 MEM_VOLATILE_P (op0) = 1;
7047 /* The following code doesn't handle CONCAT.
7048 Assume only bitpos == 0 can be used for CONCAT, due to
7049 one-element arrays having the same mode as their element. */
7050 if (GET_CODE (op0) == CONCAT)
7052 gcc_assert (bitpos == 0
7053 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)));
7057 /* In cases where an aligned union has an unaligned object
7058 as a field, we might be extracting a BLKmode value from
7059 an integer-mode (e.g., SImode) object. Handle this case
7060 by doing the extract into an object as wide as the field
7061 (which we know to be the width of a basic mode), then
7062 storing into memory, and changing the mode to BLKmode. */
7063 if (mode1 == VOIDmode
7064 || REG_P (op0) || GET_CODE (op0) == SUBREG
7065 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7066 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7067 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7068 && modifier != EXPAND_CONST_ADDRESS
7069 && modifier != EXPAND_INITIALIZER)
7070 /* If the field isn't aligned enough to fetch as a memref,
7071 fetch it as a bit field. */
7072 || (mode1 != BLKmode
7073 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7074 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
7076 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7077 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
7078 && ((modifier == EXPAND_CONST_ADDRESS
7079 || modifier == EXPAND_INITIALIZER)
7081 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7082 || (bitpos % BITS_PER_UNIT != 0)))
7083 /* If the type and the field are a constant size and the
7084 size of the type isn't the same size as the bitfield,
7085 we must use bitfield operations. */
7087 && TYPE_SIZE (TREE_TYPE (exp))
7088 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
7089 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7092 enum machine_mode ext_mode = mode;
7094 if (ext_mode == BLKmode
7095 && ! (target != 0 && MEM_P (op0)
7097 && bitpos % BITS_PER_UNIT == 0))
7098 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7100 if (ext_mode == BLKmode)
7103 target = assign_temp (type, 0, 1, 1);
7108 /* In this case, BITPOS must start at a byte boundary and
7109 TARGET, if specified, must be a MEM. */
7110 gcc_assert (MEM_P (op0)
7111 && (!target || MEM_P (target))
7112 && !(bitpos % BITS_PER_UNIT));
7114 emit_block_move (target,
7115 adjust_address (op0, VOIDmode,
7116 bitpos / BITS_PER_UNIT),
7117 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7119 (modifier == EXPAND_STACK_PARM
7120 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7125 op0 = validize_mem (op0);
7127 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
7128 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7130 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7131 (modifier == EXPAND_STACK_PARM
7132 ? NULL_RTX : target),
7133 ext_mode, ext_mode);
7135 /* If the result is a record type and BITSIZE is narrower than
7136 the mode of OP0, an integral mode, and this is a big endian
7137 machine, we must put the field into the high-order bits. */
7138 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7139 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7140 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7141 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7142 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7146 /* If the result type is BLKmode, store the data into a temporary
7147 of the appropriate type, but with the mode corresponding to the
7148 mode for the data we have (op0's mode). It's tempting to make
7149 this a constant type, since we know it's only being stored once,
7150 but that can cause problems if we are taking the address of this
7151 COMPONENT_REF because the MEM of any reference via that address
7152 will have flags corresponding to the type, which will not
7153 necessarily be constant. */
7154 if (mode == BLKmode)
7157 = assign_stack_temp_for_type
7158 (ext_mode, GET_MODE_BITSIZE (ext_mode), 0, type);
7160 emit_move_insn (new, op0);
7161 op0 = copy_rtx (new);
7162 PUT_MODE (op0, BLKmode);
7163 set_mem_attributes (op0, exp, 1);
7169 /* If the result is BLKmode, use that to access the object out of a register. */
7171 if (mode == BLKmode)
7174 /* Get a reference to just this component. */
7175 if (modifier == EXPAND_CONST_ADDRESS
7176 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7177 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7179 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7181 if (op0 == orig_op0)
7182 op0 = copy_rtx (op0);
7184 set_mem_attributes (op0, exp, 0);
7185 if (REG_P (XEXP (op0, 0)))
7186 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7188 MEM_VOLATILE_P (op0) |= volatilep;
7189 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7190 || modifier == EXPAND_CONST_ADDRESS
7191 || modifier == EXPAND_INITIALIZER)
7193 else if (target == 0)
7194 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7196 convert_move (target, op0, unsignedp);
7201 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
7204 /* Check for a built-in function. */
7205 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7206 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7208 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7210 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7211 == BUILT_IN_FRONTEND)
7212 return lang_hooks.expand_expr (exp, original_target,
7216 return expand_builtin (exp, target, subtarget, tmode, ignore);
7219 return expand_call (exp, target, ignore);
7221 case NON_LVALUE_EXPR:
7224 if (TREE_OPERAND (exp, 0) == error_mark_node)
7227 if (TREE_CODE (type) == UNION_TYPE)
7229 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7231 /* If both input and output are BLKmode, this conversion isn't doing
7232 anything except possibly changing memory attributes. */
7233 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7235 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7238 result = copy_rtx (result);
7239 set_mem_attributes (result, exp, 0);
7245 if (TYPE_MODE (type) != BLKmode)
7246 target = gen_reg_rtx (TYPE_MODE (type));
7248 target = assign_temp (type, 0, 1, 1);
7252 /* Store data into beginning of memory target. */
7253 store_expr (TREE_OPERAND (exp, 0),
7254 adjust_address (target, TYPE_MODE (valtype), 0),
7255 modifier == EXPAND_STACK_PARM);
7259 gcc_assert (REG_P (target));
7261 /* Store this field into a union of the proper type. */
7262 store_field (target,
7263 MIN ((int_size_in_bytes (TREE_TYPE
7264 (TREE_OPERAND (exp, 0)))
7266 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7267 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7271 /* Return the entire union. */
7275 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7277 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7280 /* If the signedness of the conversion differs and OP0 is
7281 a promoted SUBREG, clear that indication since we now
7282 have to do the proper extension. */
7283 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7284 && GET_CODE (op0) == SUBREG)
7285 SUBREG_PROMOTED_VAR_P (op0) = 0;
7287 return REDUCE_BIT_FIELD (op0);
7290 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7291 if (GET_MODE (op0) == mode)
7294 /* If OP0 is a constant, just convert it into the proper mode. */
7295 else if (CONSTANT_P (op0))
7297 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7298 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7300 if (modifier == EXPAND_INITIALIZER)
7301 op0 = simplify_gen_subreg (mode, op0, inner_mode,
7302 subreg_lowpart_offset (mode,
7305 op0 = convert_modes (mode, inner_mode, op0,
7306 TYPE_UNSIGNED (inner_type));
7309 else if (modifier == EXPAND_INITIALIZER)
7310 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7312 else if (target == 0)
7313 op0 = convert_to_mode (mode, op0,
7314 TYPE_UNSIGNED (TREE_TYPE
7315 (TREE_OPERAND (exp, 0))));
7318 convert_move (target, op0,
7319 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7323 return REDUCE_BIT_FIELD (op0);
7325 case VIEW_CONVERT_EXPR:
7326 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7328 /* If the input and output modes are both the same, we are done.
7329 Otherwise, if neither mode is BLKmode and both are integral and within
7330 a word, we can use gen_lowpart. If neither is true, make sure the
7331 operand is in memory and convert the MEM to the new mode. */
7332 if (TYPE_MODE (type) == GET_MODE (op0))
7334 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7335 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7336 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
7337 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7338 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7339 op0 = gen_lowpart (TYPE_MODE (type), op0);
7340 else if (!MEM_P (op0))
7342 /* If the operand is not a MEM, force it into memory. Since we
7343 are going to be changing the mode of the MEM, don't call
7344 force_const_mem for constants because we don't allow pool
7345 constants to change mode. */
7346 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7348 gcc_assert (!TREE_ADDRESSABLE (exp));
7350 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7352 = assign_stack_temp_for_type
7353 (TYPE_MODE (inner_type),
7354 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7356 emit_move_insn (target, op0);
7360 /* At this point, OP0 is in the correct mode. If the output type is such
7361 that the operand is known to be aligned, indicate that it is.
7362 Otherwise, we need only be concerned about alignment for non-BLKmode results. */
7366 op0 = copy_rtx (op0);
7368 if (TYPE_ALIGN_OK (type))
7369 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7370 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7371 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7373 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7374 HOST_WIDE_INT temp_size
7375 = MAX (int_size_in_bytes (inner_type),
7376 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7377 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7378 temp_size, 0, type);
7379 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7381 gcc_assert (!TREE_ADDRESSABLE (exp));
7383 if (GET_MODE (op0) == BLKmode)
7384 emit_block_move (new_with_op0_mode, op0,
7385 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7386 (modifier == EXPAND_STACK_PARM
7387 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7389 emit_move_insn (new_with_op0_mode, op0);
7394 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7400 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
7401 something else, make sure we add the register to the constant and
7402 then to the other thing. This case can occur during strength
7403 reduction and doing it this way will produce better code if the
7404 frame pointer or argument pointer is eliminated.
7406 fold-const.c will ensure that the constant is always in the inner
7407 PLUS_EXPR, so the only case we need to do anything about is if
7408 sp, ap, or fp is our second argument, in which case we must swap
7409 the innermost first argument and our second argument. */
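/* E.g. (illustrative): given the tree (PLUS (PLUS a C) fp), the swap
   below produces (PLUS (PLUS fp C) a), keeping the eliminable register
   next to the constant so plus_constant can fold them together.  */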
7411 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7412 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7413 && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
7414 && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7415 || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7416 || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7418 tree t = TREE_OPERAND (exp, 1);
7420 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7421 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7424 /* If the result is to be ptr_mode and we are adding an integer to
7425 something, we might be forming a constant. So try to use
7426 plus_constant. If it produces a sum and we can't accept it,
7427 use force_operand. This allows P = &ARR[const] to generate
7428 efficient code on machines where a SYMBOL_REF is not a valid address.
7431 If this is an EXPAND_SUM call, always return the sum. */
7432 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7433 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7435 if (modifier == EXPAND_STACK_PARM)
7437 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7438 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7439 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7443 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7445 /* Use immed_double_const to ensure that the constant is
7446 truncated according to the mode of OP1, then sign extended
7447 to a HOST_WIDE_INT. Using the constant directly can result
7448 in non-canonical RTL in a 64x32 cross compile. */
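/* Illustrative case: on a 64-bit host compiling for a 32-bit target,
   an SImode all-ones value must become (const_int -1), not
   (const_int 0xffffffff); immed_double_const performs exactly that
   truncation and sign extension.  */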
7450 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7452 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7453 op1 = plus_constant (op1, INTVAL (constant_part));
7454 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7455 op1 = force_operand (op1, target);
7456 return REDUCE_BIT_FIELD (op1);
7459 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7460 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7461 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7465 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7466 (modifier == EXPAND_INITIALIZER
7467 ? EXPAND_INITIALIZER : EXPAND_SUM));
7468 if (! CONSTANT_P (op0))
7470 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7471 VOIDmode, modifier);
7472 /* Return a PLUS if modifier says it's OK. */
7473 if (modifier == EXPAND_SUM
7474 || modifier == EXPAND_INITIALIZER)
7475 return simplify_gen_binary (PLUS, mode, op0, op1);
7478 /* Use immed_double_const to ensure that the constant is
7479 truncated according to the mode of OP1, then sign extended
7480 to a HOST_WIDE_INT. Using the constant directly can result
7481 in non-canonical RTL in a 64x32 cross compile. */
7483 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7485 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7486 op0 = plus_constant (op0, INTVAL (constant_part));
7487 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7488 op0 = force_operand (op0, target);
7489 return REDUCE_BIT_FIELD (op0);
7493 /* No sense saving up arithmetic to be done
7494 if it's all in the wrong mode to form part of an address.
7495 And force_operand won't know whether to sign-extend or zero-extend. */
7497 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7498 || mode != ptr_mode)
7500 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7501 subtarget, &op0, &op1, 0);
7502 if (op0 == const0_rtx)
7504 if (op1 == const0_rtx)
7509 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7510 subtarget, &op0, &op1, modifier);
7511 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7514 /* For initializers, we are allowed to return a MINUS of two
7515 symbolic constants. Here we handle all cases when both operands are constant. */
7517 /* Handle difference of two symbolic constants,
7518 for the sake of an initializer. */
7519 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7520 && really_constant_p (TREE_OPERAND (exp, 0))
7521 && really_constant_p (TREE_OPERAND (exp, 1)))
7523 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7524 NULL_RTX, &op0, &op1, modifier);
7526 /* If the last operand is a CONST_INT, use plus_constant of
7527 the negated constant. Else make the MINUS. */
7528 if (GET_CODE (op1) == CONST_INT)
7529 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
7531 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
7534 /* No sense saving up arithmetic to be done
7535 if it's all in the wrong mode to form part of an address.
7536 And force_operand won't know whether to sign-extend or zero-extend. */
7538 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7539 || mode != ptr_mode)
7542 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7543 subtarget, &op0, &op1, modifier);
7545 /* Convert A - const to A + (-const). */
7546 if (GET_CODE (op1) == CONST_INT)
7548 op1 = negate_rtx (mode, op1);
7549 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7555 /* If first operand is constant, swap them.
7556 Thus the following special case checks need only
7557 check the second operand. */
7558 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7560 tree t1 = TREE_OPERAND (exp, 0);
7561 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7562 TREE_OPERAND (exp, 1) = t1;
7565 /* Attempt to return something suitable for generating an
7566 indexed address, for machines that support that. */
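/* E.g. (illustrative): for p[i] with four-byte elements this path can
   return (MULT (REG i) (CONST_INT 4)), which the caller may fold into
   a scaled indexed address on machines that support one.  */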
7568 if (modifier == EXPAND_SUM && mode == ptr_mode
7569 && host_integerp (TREE_OPERAND (exp, 1), 0))
7571 tree exp1 = TREE_OPERAND (exp, 1);
7573 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7577 op0 = force_operand (op0, NULL_RTX);
7579 op0 = copy_to_mode_reg (mode, op0);
7581 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
7582 gen_int_mode (tree_low_cst (exp1, 0),
7583 TYPE_MODE (TREE_TYPE (exp1)))));
7586 if (modifier == EXPAND_STACK_PARM)
7589 /* Check for multiplying things that have been extended
7590 from a narrower type. If this machine supports multiplying
7591 in that narrower type with a result in the desired type,
7592 do it that way, and avoid the explicit type-conversion. */
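/* Illustrative example: on a 32-bit target, (long long) a * (long long) b
   with int operands can use a 32x32->64 widening multiply (a mulsidi3
   pattern) rather than extending both operands to 64 bits first.  */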
7593 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7594 && TREE_CODE (type) == INTEGER_TYPE
7595 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7596 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7597 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7598 && int_fits_type_p (TREE_OPERAND (exp, 1),
7599 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7600 /* Don't use a widening multiply if a shift will do. */
7601 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7602 > HOST_BITS_PER_WIDE_INT)
7603 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7605 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7606 && (TYPE_PRECISION (TREE_TYPE
7607 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7608 == TYPE_PRECISION (TREE_TYPE
7610 (TREE_OPERAND (exp, 0), 0))))
7611 /* If both operands are extended, they must either both
7612 be zero-extended or both be sign-extended. */
7613 && (TYPE_UNSIGNED (TREE_TYPE
7614 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7615 == TYPE_UNSIGNED (TREE_TYPE
7617 (TREE_OPERAND (exp, 0), 0)))))))
7619 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
7620 enum machine_mode innermode = TYPE_MODE (op0type);
7621 bool zextend_p = TYPE_UNSIGNED (op0type);
7622 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
7623 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
7625 if (mode == GET_MODE_WIDER_MODE (innermode))
7627 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7629 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7630 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7631 TREE_OPERAND (exp, 1),
7632 NULL_RTX, &op0, &op1, 0);
7634 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7635 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7636 NULL_RTX, &op0, &op1, 0);
7639 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7640 && innermode == word_mode)
7643 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7644 NULL_RTX, VOIDmode, 0);
7645 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7646 op1 = convert_modes (innermode, mode,
7647 expand_expr (TREE_OPERAND (exp, 1),
7648 NULL_RTX, VOIDmode, 0),
7651 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7652 NULL_RTX, VOIDmode, 0);
7653 temp = expand_binop (mode, other_optab, op0, op1, target,
7654 unsignedp, OPTAB_LIB_WIDEN);
7655 hipart = gen_highpart (innermode, temp);
7656 htem = expand_mult_highpart_adjust (innermode, hipart,
7660 emit_move_insn (hipart, htem);
7661 return REDUCE_BIT_FIELD (temp);
7665 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7666 subtarget, &op0, &op1, 0);
7667 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
7669 case TRUNC_DIV_EXPR:
7670 case FLOOR_DIV_EXPR:
7672 case ROUND_DIV_EXPR:
7673 case EXACT_DIV_EXPR:
7674 if (modifier == EXPAND_STACK_PARM)
7676 /* Possible optimization: compute the dividend with EXPAND_SUM
7677 then, if the divisor is constant, optimize the case
7678 where some terms of the dividend have coefficients divisible by it. */
7679 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7680 subtarget, &op0, &op1, 0);
7681 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7684 /* Emit a/b as a*(1/b). Later we may be able to CSE the reciprocal,
7685 saving an expensive divide. If not, combine will rebuild the original computation. */
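/* Illustration: for x1/d, x2/d, x3/d this rewrite yields t = 1/d
   followed by x1*t, x2*t, x3*t once CSE shares the reciprocal, so a
   single divide replaces three.  */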
7687 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7688 && TREE_CODE (type) == REAL_TYPE
7689 && !real_onep (TREE_OPERAND (exp, 0)))
7690 return expand_expr (build2 (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7691 build2 (RDIV_EXPR, type,
7692 build_real (type, dconst1),
7693 TREE_OPERAND (exp, 1))),
7694 target, tmode, modifier);
7698 case TRUNC_MOD_EXPR:
7699 case FLOOR_MOD_EXPR:
7701 case ROUND_MOD_EXPR:
7702 if (modifier == EXPAND_STACK_PARM)
7704 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7705 subtarget, &op0, &op1, 0);
7706 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7708 case FIX_ROUND_EXPR:
7709 case FIX_FLOOR_EXPR:
7711 gcc_unreachable (); /* Not used for C. */
7713 case FIX_TRUNC_EXPR:
7714 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7715 if (target == 0 || modifier == EXPAND_STACK_PARM)
7716 target = gen_reg_rtx (mode);
7717 expand_fix (target, op0, unsignedp);
7721 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7722 if (target == 0 || modifier == EXPAND_STACK_PARM)
7723 target = gen_reg_rtx (mode);
7724 /* expand_float can't figure out what to do if FROM has VOIDmode.
7725 So give it the correct mode. With -O, cse will optimize this. */
7726 if (GET_MODE (op0) == VOIDmode)
7727 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7729 expand_float (target, op0,
7730 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7734 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7735 if (modifier == EXPAND_STACK_PARM)
7737 temp = expand_unop (mode,
7738 optab_for_tree_code (NEGATE_EXPR, type),
7741 return REDUCE_BIT_FIELD (temp);
7744 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7745 if (modifier == EXPAND_STACK_PARM)
7748 /* ABS_EXPR is not valid for complex arguments. */
7749 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7750 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
7752 /* Unsigned abs is simply the operand. Testing here means we don't
7753 risk generating incorrect code below. */
7754 if (TYPE_UNSIGNED (type))
7757 return expand_abs (mode, op0, target, unsignedp,
7758 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7762 target = original_target;
7764 || modifier == EXPAND_STACK_PARM
7765 || (MEM_P (target) && MEM_VOLATILE_P (target))
7766 || GET_MODE (target) != mode
7768 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7769 target = gen_reg_rtx (mode);
7770 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7771 target, &op0, &op1, 0);
7773 /* First try to do it with a special MIN or MAX instruction.
7774 If that does not win, use a conditional jump to select the proper value. */
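/* Sketch of the fallback emitted below for MAX_EXPR, assuming the
   min/max optab loses:

	target = op0;
	if (target >= op1) goto done;
	target = op1;
      done:  */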
7776 this_optab = optab_for_tree_code (code, type);
7777 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7782 /* At this point, a MEM target is no longer useful; we will get better code without it. */
7785 if (! REG_P (target))
7786 target = gen_reg_rtx (mode);
7788 /* If op1 was placed in target, swap op0 and op1. */
7789 if (target != op0 && target == op1)
7796 /* We generate better code and avoid problems with op1 mentioning
7797 target by forcing op1 into a pseudo if it isn't a constant. */
7798 if (! CONSTANT_P (op1))
7799 op1 = force_reg (mode, op1);
7802 emit_move_insn (target, op0);
7804 op0 = gen_label_rtx ();
7806 /* If this mode is an integer too wide to compare properly,
7807 compare word by word. Rely on cse to optimize constant cases. */
7808 if (GET_MODE_CLASS (mode) == MODE_INT
7809 && ! can_compare_p (GE, mode, ccp_jump))
7811 if (code == MAX_EXPR)
7812 do_jump_by_parts_greater_rtx (mode, unsignedp, target, op1,
7815 do_jump_by_parts_greater_rtx (mode, unsignedp, op1, target,
7820 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7821 unsignedp, mode, NULL_RTX, NULL_RTX, op0);
7823 emit_move_insn (target, op1);
7828 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7829 if (modifier == EXPAND_STACK_PARM)
7831 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7835 /* ??? Can optimize bitwise operations with one arg constant.
7836 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7837 and (a bitwise1 b) bitwise2 b (etc)
7838 but that is probably not worthwhile. */
7840 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7841 boolean values when we want in all cases to compute both of them. In
7842 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7843 as actual zero-or-1 values and then bitwise anding. In cases where
7844 there cannot be any side effects, better code would be made by
7845 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7846 how to recognize those cases. */
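/* Illustration: for "p & q" on boolean values (TRUTH_AND_EXPR), both
   p and q are reduced to 0 or 1 and bitwise ANDed; TRUTH_ANDIF_EXPR
   would instead branch around the evaluation of q when p is false.  */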
7848 case TRUTH_AND_EXPR:
7849 code = BIT_AND_EXPR;
7854 code = BIT_IOR_EXPR;
7858 case TRUTH_XOR_EXPR:
7859 code = BIT_XOR_EXPR;
7867 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7869 if (modifier == EXPAND_STACK_PARM)
7871 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7872 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7875 /* Could determine the answer when only additive constants differ. Also,
7876 the addition of one can be handled by changing the condition. */
7883 case UNORDERED_EXPR:
7891 temp = do_store_flag (exp,
7892 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
7893 tmode != VOIDmode ? tmode : mode, 0);
7897 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7898 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7900 && REG_P (original_target)
7901 && (GET_MODE (original_target)
7902 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7904 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7907 /* If temp is constant, we can just compute the result. */
7908 if (GET_CODE (temp) == CONST_INT)
7910 if (INTVAL (temp) != 0)
7911 emit_move_insn (target, const1_rtx);
7913 emit_move_insn (target, const0_rtx);
7918 if (temp != original_target)
7920 enum machine_mode mode1 = GET_MODE (temp);
7921 if (mode1 == VOIDmode)
7922 mode1 = tmode != VOIDmode ? tmode : mode;
7924 temp = copy_to_mode_reg (mode1, temp);
7927 op1 = gen_label_rtx ();
7928 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7929 GET_MODE (temp), unsignedp, op1);
7930 emit_move_insn (temp, const1_rtx);
7935 /* If no set-flag instruction, must generate a conditional store
7936 into a temporary variable. Drop through and handle this like && and ||. */
7941 || modifier == EXPAND_STACK_PARM
7942 || ! safe_from_p (target, exp, 1)
7943 /* Make sure we don't have a hard reg (such as function's return
7944 value) live across basic blocks, if not optimizing. */
7945 || (!optimize && REG_P (target)
7946 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7947 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7950 emit_move_insn (target, const0_rtx);
7952 op1 = gen_label_rtx ();
7953 jumpifnot (exp, op1);
7956 emit_move_insn (target, const1_rtx);
7959 return ignore ? const0_rtx : target;
7961 case TRUTH_NOT_EXPR:
7962 if (modifier == EXPAND_STACK_PARM)
7964 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7965 /* The parser is careful to generate TRUTH_NOT_EXPR
7966 only with operands that are always zero or one. */
7967 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7968 target, 1, OPTAB_LIB_WIDEN);
7972 case STATEMENT_LIST:
7974 tree_stmt_iterator iter;
7976 gcc_assert (ignore);
7978 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
7979 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
7984 /* A COND_EXPR with its type being VOID_TYPE represents a
7985 conditional jump and is handled in
7986 expand_gimple_cond_expr. */
7987 gcc_assert (!VOID_TYPE_P (TREE_TYPE (exp)));
7989 /* Note that COND_EXPRs whose type is a structure or union
7990 are required to be constructed to contain assignments of
7991 a temporary variable, so that we can evaluate them here
7992 for side effect only. If type is void, we must do likewise. */
7994 gcc_assert (!TREE_ADDRESSABLE (type)
7996 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node
7997 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node);
7999 /* If we are not to produce a result, we have no target. Otherwise,
8000 if a target was specified use it; it will not be used as an
8001 intermediate target unless it is safe. If no target, use a temporary. */
8004 if (modifier != EXPAND_STACK_PARM
8006 && safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8007 && GET_MODE (original_target) == mode
8008 #ifdef HAVE_conditional_move
8009 && (! can_conditionally_move_p (mode)
8010 || REG_P (original_target))
8012 && !MEM_P (original_target))
8013 temp = original_target;
8015 temp = assign_temp (type, 0, 0, 1);
8017 do_pending_stack_adjust ();
8019 op0 = gen_label_rtx ();
8020 op1 = gen_label_rtx ();
8021 jumpifnot (TREE_OPERAND (exp, 0), op0);
8022 store_expr (TREE_OPERAND (exp, 1), temp,
8023 modifier == EXPAND_STACK_PARM);
8025 emit_jump_insn (gen_jump (op1));
8028 store_expr (TREE_OPERAND (exp, 2), temp,
8029 modifier == EXPAND_STACK_PARM);
8036 target = expand_vec_cond_expr (exp, target);
8041 tree lhs = TREE_OPERAND (exp, 0);
8042 tree rhs = TREE_OPERAND (exp, 1);
8044 gcc_assert (ignore);
8046 /* Check for |= or &= of a bitfield of size one into another bitfield
8047 of size 1. In this case, (unless we need the result of the
8048 assignment) we can do this more efficiently with a
8049 test followed by an assignment, if necessary.
8051 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8052 things change so we do, this code should be enhanced to support it. */
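/* Illustration: for one-bit fields, "s.a |= s.b" becomes the jump
   sequence "if (s.b) s.a = 1;", and "s.a &= s.b" becomes
   "if (!s.b) s.a = 0;", avoiding a read-modify-write of s.a.  */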
8054 if (TREE_CODE (lhs) == COMPONENT_REF
8055 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8056 || TREE_CODE (rhs) == BIT_AND_EXPR)
8057 && TREE_OPERAND (rhs, 0) == lhs
8058 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8059 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8060 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8062 rtx label = gen_label_rtx ();
8064 do_jump (TREE_OPERAND (rhs, 1),
8065 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8066 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8067 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8068 (TREE_CODE (rhs) == BIT_IOR_EXPR
8070 : integer_zero_node)));
8071 do_pending_stack_adjust ();
8076 expand_assignment (lhs, rhs);
8082 if (!TREE_OPERAND (exp, 0))
8083 expand_null_return ();
8085 expand_return (TREE_OPERAND (exp, 0));
8089 return expand_expr_addr_expr (exp, target, tmode, modifier);
8092 /* Get the rtx code of the operands. */
8093 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8094 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8097 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8099 /* Move the real (op0) and imaginary (op1) parts to their location. */
8100 write_complex_part (target, op0, false);
8101 write_complex_part (target, op1, true);
8106 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8107 return read_complex_part (op0, false);
8110 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8111 return read_complex_part (op0, true);
8114 expand_resx_expr (exp);
8117 case TRY_CATCH_EXPR:
8119 case EH_FILTER_EXPR:
8120 case TRY_FINALLY_EXPR:
8121 /* Lowered by tree-eh.c. */
8124 case WITH_CLEANUP_EXPR:
8125 case CLEANUP_POINT_EXPR:
8127 case CASE_LABEL_EXPR:
8133 case PREINCREMENT_EXPR:
8134 case PREDECREMENT_EXPR:
8135 case POSTINCREMENT_EXPR:
8136 case POSTDECREMENT_EXPR:
8139 case TRUTH_ANDIF_EXPR:
8140 case TRUTH_ORIF_EXPR:
8141 /* Lowered by gimplify.c. */
8145 return get_exception_pointer (cfun);
8148 return get_exception_filter (cfun);
8151 /* Function descriptors are not valid except as
8152 initialization constants, and should not be expanded. */
8160 expand_label (TREE_OPERAND (exp, 0));
8164 expand_asm_expr (exp);
8167 case WITH_SIZE_EXPR:
8168 /* WITH_SIZE_EXPR expands to its first argument. The caller should
8169 have pulled out the size to use in whatever context it needed. */
8170 return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode,
8173 case REALIGN_LOAD_EXPR:
8175 tree oprnd0 = TREE_OPERAND (exp, 0);
8176 tree oprnd1 = TREE_OPERAND (exp, 1);
8177 tree oprnd2 = TREE_OPERAND (exp, 2);
8180 this_optab = optab_for_tree_code (code, type);
8181 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
8182 op2 = expand_expr (oprnd2, NULL_RTX, VOIDmode, 0);
8183 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8192 return lang_hooks.expand_expr (exp, original_target, tmode,
8196 /* Here to do an ordinary binary operator. */
8198 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8199 subtarget, &op0, &op1, 0);
8201 this_optab = optab_for_tree_code (code, type);
8203 if (modifier == EXPAND_STACK_PARM)
8205 temp = expand_binop (mode, this_optab, op0, op1, target,
8206 unsignedp, OPTAB_LIB_WIDEN);
8208 return REDUCE_BIT_FIELD (temp);
8210 #undef REDUCE_BIT_FIELD
/* Subroutine of above: reduce EXP to the precision of TYPE (in the
   signedness of TYPE), possibly returning the result in TARGET.  */

static rtx
reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
{
  HOST_WIDE_INT prec = TYPE_PRECISION (type);
  if (target && GET_MODE (target) != GET_MODE (exp))
    target = 0;
  if (TYPE_UNSIGNED (type))
    {
      rtx mask;
      if (prec < HOST_BITS_PER_WIDE_INT)
	mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
				   GET_MODE (exp));
      else
	mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
				   ((unsigned HOST_WIDE_INT) 1
				    << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
				   GET_MODE (exp));
      return expand_and (GET_MODE (exp), exp, mask, target);
    }
  else
    {
      tree count = build_int_cst (NULL_TREE,
				  GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
      exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
      return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
    }
}
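
/* Illustrative sketch (example only, not used by the compiler): the same
   reduction on host integers.  For an unsigned field the value is masked
   down to PREC bits, as expand_and does above; for a signed field it is
   shifted left so the field's sign bit becomes the word's sign bit, then
   shifted back, which sign-extends.  The function name and the use of
   'long' as the word type are assumptions made for this example, and the
   right shift of a negative value is assumed to be arithmetic, as the
   RSHIFT_EXPR above is.  */

static long
reduce_precision_example (long val, int prec, int is_unsigned)
{
  int word_bits = (int) sizeof (long) * 8;	/* Assumes 8-bit bytes.  */

  if (is_unsigned)
    /* Keep only the low PREC bits.  */
    return (long) ((unsigned long) val & (((unsigned long) 1 << prec) - 1));

  /* Shift the field to the top of the word and back down again.  */
  return ((long) ((unsigned long) val << (word_bits - prec))
	  >> (word_bits - prec));
}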
/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
   when applied to the address of EXP produces an address known to be
   aligned more than BIGGEST_ALIGNMENT.  */

static int
is_aligning_offset (tree offset, tree exp)
{
  /* Strip off any conversions.  */
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
	 || TREE_CODE (offset) == NOP_EXPR
	 || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  /* We must now have a BIT_AND_EXPR with a constant that is one less than
     a power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
  if (TREE_CODE (offset) != BIT_AND_EXPR
      || !host_integerp (TREE_OPERAND (offset, 1), 1)
      || compare_tree_int (TREE_OPERAND (offset, 1),
			   BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
      || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
    return 0;

  /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
     It must be NEGATE_EXPR.  Then strip any more conversions.  */
  offset = TREE_OPERAND (offset, 0);
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
	 || TREE_CODE (offset) == NOP_EXPR
	 || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  if (TREE_CODE (offset) != NEGATE_EXPR)
    return 0;

  offset = TREE_OPERAND (offset, 0);
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
	 || TREE_CODE (offset) == NOP_EXPR
	 || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  /* This must now be the address of EXP.  */
  return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
}
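
/* Illustrative sketch (example only): the pattern recognized above is the
   usual way to round an address up to an alignment boundary.  For a
   power-of-two ALIGN, adding (-addr) & (align - 1) to ADDR yields the next
   address that is a multiple of ALIGN, which is why such an offset
   guarantees extra alignment.  The name is invented for the example.  */

static unsigned long
align_up_example (unsigned long addr, unsigned long align)
{
  /* ALIGN is assumed to be a power of two, matching the exact_log2
     check above.  */
  return addr + ((- addr) & (align - 1));
}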
/* Return the tree node if ARG corresponds to a string constant, or zero
   if it doesn't.  If we return nonzero, set *PTR_OFFSET to the offset
   in bytes within the string that ARG is accessing.  The type of the
   offset will be `sizetype'.  */

tree
string_constant (tree arg, tree *ptr_offset)
{
  tree array, offset;

  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == ADDR_EXPR)
    {
      if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
	{
	  *ptr_offset = size_zero_node;
	  return TREE_OPERAND (arg, 0);
	}
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
	{
	  array = TREE_OPERAND (arg, 0);
	  offset = size_zero_node;
	}
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
	{
	  array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
	  if (TREE_CODE (array) != STRING_CST
	      && TREE_CODE (array) != VAR_DECL)
	    return 0;
	}
      else
	return 0;
    }
  else if (TREE_CODE (arg) == PLUS_EXPR)
    {
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
	      || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
	{
	  array = TREE_OPERAND (arg0, 0);
	  offset = arg1;
	}
      else if (TREE_CODE (arg1) == ADDR_EXPR
	       && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
		   || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
	{
	  array = TREE_OPERAND (arg1, 0);
	  offset = arg0;
	}
      else
	return 0;
    }
  else
    return 0;

  if (TREE_CODE (array) == STRING_CST)
    {
      *ptr_offset = convert (sizetype, offset);
      return array;
    }
  else if (TREE_CODE (array) == VAR_DECL)
    {
      int length;

      /* Variables initialized to string literals can be handled too.  */
      if (DECL_INITIAL (array) == NULL_TREE
	  || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
	return 0;

      /* The variable must be read-only, non-volatile, and bind locally.  */
      if (! TREE_READONLY (array)
	  || TREE_SIDE_EFFECTS (array)
	  || ! targetm.binds_local_p (array))
	return 0;

      /* Avoid const char foo[4] = "abcde";  */
      if (DECL_SIZE_UNIT (array) == NULL_TREE
	  || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
	  || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
	  || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
	return 0;

      /* If the variable is bigger than the string literal, OFFSET must
	 be constant and within the bounds of the string literal.  */
      offset = convert (sizetype, offset);
      if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
	  && (! host_integerp (offset, 1)
	      || compare_tree_int (offset, length) >= 0))
	return 0;

      *ptr_offset = offset;
      return DECL_INITIAL (array);
    }

  return 0;
}
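
/* Illustrative sketch (example only): the size checks above, restated on
   host integers.  ARRAY_SIZE is the object size in bytes, LENGTH the
   literal's length including the terminating NUL, OFFSET the byte offset
   accessed.  In the real code OFFSET must additionally be a compile-time
   constant when the object is bigger than the literal; the function name
   is invented for the example.  */

static int
string_access_ok_example (long array_size, long length, long offset)
{
  /* A literal longer than its object (const char foo[4] = "abcde";)
     is rejected outright.  */
  if (length <= 0 || array_size < length)
    return 0;

  /* If the object is bigger than the literal, the tail is zero-filled,
     so only offsets inside the literal itself are known characters.  */
  if (array_size > length && offset >= length)
    return 0;

  return 1;
}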
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is nonzero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */
static rtx
do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */
  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	       == FUNCTION_TYPE))
	  || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
		  == FUNCTION_TYPE))))
    return 0;
#endif

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */
  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
	code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = LT;
      else
	code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = GE;
      else
	code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
	code = unsignedp ? GEU : GE;
      break;

    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;
    case LTGT_EXPR:
      code = LTGT;
      break;

    default:
      gcc_unreachable ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.

     The code to make this transformation was moved into fold_single_bit_test,
     so we just call into the folder and expand its result.  */
  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
      return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
						arg0, arg1, type),
			  target, VOIDmode, EXPAND_NORMAL);
    }
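
  /* For instance (a worked example only; the constants are invented):
     testing bit 3,

	 (x & 8) != 0   becomes   (x >> 3) & 1
	 (x & 8) == 0   becomes   ((x >> 3) & 1) ^ 1

     which needs only a shift, a mask and possibly an xor, with no
     store-flag (scc) instruction involved.  */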
  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (code, operand_mode, ccp_store_flag))
    return 0;

  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
    {
      /* We can only do this if it is one of the special cases that
	 can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
	  || (! only_cheap && code == GE && integer_zerop (arg1)))
	;
      else if (BRANCH_COST >= 0
	       && ! only_cheap && (code == NE || code == EQ)
	       && TREE_CODE (type) != REAL_TYPE
	       && ((abs_optab->handlers[(int) operand_mode].insn_code
		    != CODE_FOR_nothing)
		   || (ffs_optab->handlers[(int) operand_mode].insn_code
		       != CODE_FOR_nothing)))
	;
      else
	return 0;
    }

  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode)
    subtarget = 0;

  expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  result = emit_store_flag (target, code, op0, op1,
			    operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
	result = expand_binop (mode, xor_optab, result, const1_rtx,
			       result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (!REG_P (target)
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
			     operand_mode, NULL_RTX);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
	     || (result != const0_rtx && invert))
	    ? const0_rtx : const1_rtx);

  /* The code of RESULT may not match CODE if compare_from_rtx
     decided to swap its operands and reverse the original code.

     We know that compare_from_rtx returns either a CONST_INT or
     a new comparison code, so it is safe to just extract the
     code from RESULT.  */
  code = GET_CODE (result);

  label = gen_label_rtx ();
  gcc_assert (bcc_gen_fctn[(int) code]);

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
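
/* Illustrative sketch (example only): in source form, the set/jump/set
   fallback above computes COND ? 1 : 0 without a store-flag instruction.
   The function name is invented and LHS < RHS stands in for whatever
   comparison CODE denotes.  */

static int
store_flag_fallback_example (int lhs, int rhs, int invert)
{
  int target = invert ? 0 : 1;	/* First set.  */
  if (lhs < rhs)		/* Jump around the second set if COND holds.  */
    goto done;
  target = invert ? 1 : 0;	/* Second set, on the fall-through path.  */
 done:
  return target;
}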
/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif

/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */
#ifndef CASE_VALUES_THRESHOLD
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#endif /* CASE_VALUES_THRESHOLD */

unsigned int
case_values_threshold (void)
{
  return CASE_VALUES_THRESHOLD;
}
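
/* Illustrative sketch (example only): roughly how a threshold like this
   steers switch lowering.  Below the threshold a chain of compares is
   cheaper; at or above it a dispatch table tends to win (the real
   decision also weighs the density of the case values).  The function
   name is invented for the example.  */

static int
use_dispatch_table_example (unsigned int case_count)
{
  return case_count >= case_values_threshold ();
}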
/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */

int
try_casesi (tree index_type, tree index_expr, tree minval, tree range,
	    rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build2 (MINUS_EXPR, index_type,
			   index_expr, minval);
      minval = integer_zero_node;
      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
			       omode, 1, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
	{
	  index_expr = convert (lang_hooks.types.type_for_size
				(index_bits, 0), index_expr);
	  index_type = TREE_TYPE (index_expr);
	}

      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
    }

  do_pending_stack_adjust ();

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);

  op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
		       op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
		       op2, TYPE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
			      table_label, default_label));
  return 1;
}
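
/* Illustrative sketch (example only): the wide-index path above, restated
   on host integers.  The bias and the bounds check happen in the wide
   type; only once INDEX is known to lie in [0, RANGE] is it truncated to
   the narrower type the dispatch uses.  All names here are invented.  */

static int
narrow_switch_index_example (long long index, long long minval,
			     long long range, unsigned *out)
{
  unsigned long long biased
    = (unsigned long long) index - (unsigned long long) minval;

  /* One unsigned compare catches both index < minval (which wraps to a
     huge value) and index > minval + range.  */
  if (biased > (unsigned long long) range)
    return 0;			/* Caller should jump to the default label.  */

  *out = (unsigned) biased;	/* Now safe to truncate.  */
  return 1;
}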
/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif

/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

static void
do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
	      rtx default_label)
{
  rtx temp, vector;

  if (INTVAL (range) > cfun->max_jumptable_ents)
    cfun->max_jumptable_ents = INTVAL (range);

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
			   default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && !REG_P (index))
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
			gen_rtx_MULT (Pmode, index,
				      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
			gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_const_mem (CASE_VECTOR_MODE, index);
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
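
/* Illustrative sketch (example only): what the emitted sequence does, in
   source form.  TABLE plays the role of the jump table; the single
   unsigned compare performs both bounds checks because INDEX has already
   been biased by the lowest case value, and the table indexing mirrors
   the PLUS/MULT address arithmetic above.  All names are invented.  */

static int
tablejump_example (unsigned long index, unsigned long range,
		   int (*const table[]) (void), int (*deflt) (void))
{
  if (index > range)		/* GTU compare: catches both ends.  */
    return deflt ();

  /* Scale INDEX by the entry size and dispatch through the table.  */
  return table[index] ();
}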
int
try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
	       rtx table_label, rtx default_label)
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold (build2 (MINUS_EXPR, index_type,
			     convert (index_type, index_expr),
			     convert (index_type, minval)));
  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
		convert_modes (TYPE_MODE (index_type),
			       TYPE_MODE (TREE_TYPE (range)),
			       expand_expr (range, NULL_RTX,
					    VOIDmode, 0),
			       TYPE_UNSIGNED (TREE_TYPE (range))),
		table_label, default_label);
  return 1;
}
/* Nonzero if the mode is a valid vector mode for this architecture.
   This returns nonzero even if there is no hardware support for the
   vector mode, but we can emulate with narrower modes.  */

int
vector_mode_valid_p (enum machine_mode mode)
{
  enum mode_class class = GET_MODE_CLASS (mode);
  enum machine_mode innermode;

  /* Only integer and float vector modes make sense here.  */
  if (class != MODE_VECTOR_INT
      && class != MODE_VECTOR_FLOAT)
    return 0;

  /* Hardware support.  Woo hoo!  */
  if (targetm.vector_mode_supported_p (mode))
    return 1;

  innermode = GET_MODE_INNER (mode);

  /* We should probably return 1 if requesting V4DI and we have no DI,
     but we have V2DI, but this is probably very unlikely.  */

  /* If we have support for the inner mode, we can safely emulate it.
     We may not have V2DI, but we can emulate with a pair of DIs.  */
  return targetm.scalar_mode_supported_p (innermode);
}
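
/* Illustrative sketch (example only): emulating a V2DI addition with a
   pair of DImode operations, which is all the fallback above relies on.
   The struct stands in for a vector mode with two 64-bit lanes; the
   names and the use of 'unsigned long long' are invented for the
   example.  */

struct v2di_example { unsigned long long lane[2]; };

static struct v2di_example
v2di_add_example (struct v2di_example a, struct v2di_example b)
{
  struct v2di_example r;
  /* One scalar operation per lane, in place of a single vector insn.  */
  r.lane[0] = a.lane[0] + b.lane[0];
  r.lane[1] = a.lane[1] + b.lane[1];
  return r;
}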
/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */

static rtx
const_vector_from_tree (tree exp)
{
  rtvec v;
  int units, i;
  tree link, elt;
  enum machine_mode inner, mode;

  mode = TYPE_MODE (TREE_TYPE (exp));

  if (initializer_zerop (exp))
    return CONST0_RTX (mode);

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  v = rtvec_alloc (units);

  link = TREE_VECTOR_CST_ELTS (exp);
  for (i = 0; link; link = TREE_CHAIN (link), ++i)
    {
      elt = TREE_VALUE (link);

      if (TREE_CODE (elt) == REAL_CST)
	RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
							 inner);
      else
	RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
					       TREE_INT_CST_HIGH (elt),
					       inner);
    }

  /* Initialize remaining elements to 0.  */
  for (; i < units; ++i)
    RTVEC_ELT (v, i) = CONST0_RTX (inner);

  return gen_rtx_CONST_VECTOR (mode, v);
}

#include "gt-expr.h"