/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "typeclass.h"
#include "langhooks.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "tree-flow.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
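/* Illustration (not part of the original source): with a STACK_PUSH_CODE
   of PRE_DEC, a one-word push is emitted as a store through a
   pre-decremented stack pointer, roughly

     (set (mem:SI (pre_dec:SI (reg/f:SI sp))) (reg:SI 0))

   so the pointer adjustment and the store happen in a single insn.  */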
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};
/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
  void *constfundata;
  int reverse;
};
static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
						     unsigned int);
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
			      struct move_by_pieces *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned);
static rtx emit_block_move_via_libcall (rtx, rtx, rtx);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
			       struct store_by_pieces *);
static bool clear_storage_via_clrmem (rtx, rtx, unsigned);
static rtx clear_storage_via_libcall (rtx, rtx);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
				     HOST_WIDE_INT, enum machine_mode,
				     tree, tree, int, int);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
			tree, enum machine_mode, int, tree, int);
static rtx var_rtx (tree);

static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);

static int is_aligning_offset (tree, tree);
static void expand_operands (tree, tree, rtx, rtx*, rtx*,
			     enum expand_modifier);
static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
static rtx do_store_flag (tree, rtx, enum machine_mode, int);
static void emit_single_push_insn (enum machine_mode, rtx, tree);
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
static rtx const_vector_from_tree (tree);
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
#endif

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero, or
   to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
#endif
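/* Example (illustrative, not from the original source): on a 32-bit
   target with a MOVE_RATIO of 3, copying 8 bytes at word alignment
   takes two SImode moves, so move_by_pieces_ninsns returns 2 and
   MOVE_BY_PIECES_P is true; a 32-byte copy would need 8 insns, so the
   block-move expander or a libcall would be used instead.  */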
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movmem_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrmem_optab[NUM_MACHINE_MODES];

/* These arrays record the insn_code of two different kinds of insns
   to perform block compares.  */
enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
enum insn_code cmpmem_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once (void)
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    REGNO (reg) = regno;

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
	   srcmode = GET_MODE_WIDER_MODE (srcmode))
	{
	  enum insn_code ic;

	  ic = can_extend_p (mode, srcmode, 0);
	  if (ic == CODE_FOR_nothing)
	    continue;

	  PUT_MODE (mem, srcmode);

	  if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
	    float_extend_from_mem[mode][srcmode] = true;
	}
    }
}
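/* Illustration (not from the original source): on a target whose
   extendsfdf2 pattern accepts a memory source operand, the loop above
   sets float_extend_from_mem[DFmode][SFmode], so a float widened to
   double can be extended straight from its memory slot instead of
   being loaded into a register first.  */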
/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (rtx to, rtx from, int unsignedp)
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
			      : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));

  if (to_real != from_real)
    abort ();

  /* If the source and destination are already the same, then there's
     nothing to do.  */
  if (to == from)
    return;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
	abort ();

      if (VECTOR_MODE_P (to_mode))
	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }

  if (to_real)
    {
      rtx value, insns;
      convert_optab tab;

      if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
	tab = sext_optab;
      else if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
	tab = trunc_optab;
      else
	abort ();

      /* Try converting directly if the insn is supported.  */

      code = tab->handlers[to_mode][from_mode].insn_code;
      if (code != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from,
			  tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
	  return;
	}

      /* Otherwise use a libcall.  */
      libcall = tab->handlers[to_mode][from_mode].libfunc;

      if (!libcall)
	/* This conversion is not implemented yet.  */
	abort ();

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
				       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value,
			  tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
								       from)
			  : gen_rtx_FLOAT_EXTEND (to_mode, from));
      return;
    }
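  /* For example (illustrative): a DFmode -> SFmode conversion normally
     matches a truncdfsf2 insn; on a soft-float target without one, the
     conversion falls through to a libgcc routine such as __truncdfsf2,
     and the FLOAT_TRUNCATE rtx recorded above lets later passes treat
     the call's result as an ordinary truncation.  */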
  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);

      if (trunc_optab->handlers[to_mode][full_mode].insn_code
	  == CODE_FOR_nothing)
	abort ();

      if (full_mode != from_mode)
	from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
		      to, from, UNKNOWN);
      return;
    }
  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);

      if (sext_optab->handlers[full_mode][from_mode].insn_code
	  == CODE_FOR_nothing)
	abort ();

      emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
		      to, from, UNKNOWN);
      if (to_mode == full_mode)
	return;

      /* else proceed to integer conversions below.  */
      from_mode = full_mode;
    }
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (REG_P (to))
	    {
	      if (reg_overlap_mentioned_p (to, from))
		from = force_reg (from_mode, from);
	      emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	    }
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }
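  /* Example (illustrative): widening an SImode value to DImode on a
     32-bit target copies the low word and then fills the high word:
     with 0 when UNSIGNEDP is set, otherwise with the low word shifted
     arithmetically right by 31 so every bit is a copy of the sign
     bit.  */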
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }
  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  if (flag_force_mem)
	    from = force_not_mem (from);

	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;
	  rtx tmp;
	  tree shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
				      - GET_MODE_BITSIZE (from_mode), 0);
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }
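  /* Illustrative example: extending QImode to SImode with no
     extendqisi2 pattern and no usable intermediate mode becomes a left
     shift by 24 followed by a right shift by 24 in SImode; the right
     shift is arithmetic for sign-extension and logical for
     zero-extension.  */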
  /* Support special truncate insns for certain modes.  */
  if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
    {
      emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
		      to, from, UNKNOWN);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
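/* Usage sketch (illustrative, not from the original source):

     rtx di = gen_reg_rtx (DImode);
     rtx si = gen_reg_rtx (SImode);
     convert_move (di, si, 1);

   emits whatever sequence zero-extends SI into DI on the current
   target: a single zero_extendsidi2 insn if one exists, otherwise one
   of the multiword or shift-based fallbacks above.  */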
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.  */

rtx
convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */

rtx
convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (REG_P (x)
			  && (! HARD_REGISTER_P (x)
			      || HARD_REGNO_MODE_OK (REGNO (x), mode))
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ??? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return gen_int_mode (val, mode);
	}

      return gen_lowpart (mode, x);
    }

  /* Converting from integer constant into mode is always equivalent to a
     subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    {
      if (GET_MODE_BITSIZE (mode) != GET_MODE_BITSIZE (oldmode))
	abort ();
      return simplify_gen_subreg (mode, x, oldmode, 0);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
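/* Worked example (illustrative): convert_modes (HImode, QImode,
   GEN_INT (-1), 1) takes the CONST_INT path above: val = -1 is masked
   to 0xff (zero-extension from the 8-bit oldmode), so gen_int_mode
   returns (const_int 255) rather than a negative constant.  */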
/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
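/* Illustration (not from the original source): with a 64-bit
   HOST_WIDE_INT, immediates of up to 2 * 8 = 16 bytes are
   representable, so a target whose MOVE_MAX_PIECES is 8 gets a
   STORE_MAX_PIECES of MIN (8, 16) = 8.  */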
/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

int
can_move_by_pieces (unsigned HOST_WIDE_INT len,
		    unsigned int align ATTRIBUTE_UNUSED)
{
  return MOVE_BY_PIECES_P (len, align);
}
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
   stpcpy.  */

rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
		unsigned int align, int endp)
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();

  if (endp)
    {
      rtx to1;

      if (data.reverse)
	abort ();
      if (data.autinc_to)
	{
	  if (endp == 2)
	    {
	      if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
		emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
	      else
		data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
								-1));
	    }
	  to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
					   data.offset);
	}
      else
	{
	  if (endp == 2)
	    --data.offset;
	  to1 = adjust_address (data.to, QImode, data.offset);
	}
      return to1;
    }
  else
    return data.to;
}
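/* Example of the mode descent (illustrative): an 11-byte copy at
   32-bit alignment on a 32-bit target is emitted as two SImode moves,
   one HImode move and one QImode move; each pass of the while loop
   above handles one mode and shrinks max_size for the next.  */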
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align)
{
  unsigned HOST_WIDE_INT n_insns = 0;
  unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  if (l)
    abort ();
  return n_insns;
}
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
		  struct move_by_pieces *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->to)
	{
	  if (data->autinc_to)
	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
					     data->offset);
	  else
	    to1 = adjust_address (data->to, mode, data->offset);
	}

      if (data->autinc_from)
	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
					   data->offset);
      else
	from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
	emit_insn ((*genfun) (to1, from1));
      else
	{
#ifdef PUSH_ROUNDING
	  emit_single_push_insn (mode, from1, NULL);
#else
	  abort ();
#endif
	}

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
	 to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      abort ();
    }

  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  if (!MEM_P (x))
    abort ();
  if (!MEM_P (y))
    abort ();
  if (size == 0)
    abort ();

  /* Make sure we've got BLKmode addresses; store_one_arg can decide that
     block copy is more efficient for other large modes, e.g. DCmode.  */
  x = adjust_address (x, BLKmode, 0);
  y = adjust_address (y, BLKmode, 0);

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (GET_CODE (size) == CONST_INT)
    {
      if (INTVAL (size) == 0)
	return 0;

      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, size);
      set_mem_size (y, size);
    }

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movmem (x, y, size, align))
    ;
  else if (may_use_call)
    retval = emit_block_move_via_libcall (x, y, size);
  else
    emit_block_move_via_loop (x, y, size, align);
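  /* Strategy summary (illustrative): a small constant-size copy is
     expanded inline piecewise, a target movmem pattern is tried next,
     then a memcpy libcall, and the byte loop is the last resort when
     libcalls are forbidden.  */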
  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}
/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
  /* If arguments are pushed on the stack, then they're safe.  */
  if (PUSH_ARGS)
    return true;

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
  {
    tree fn = emit_block_move_libcall_fn (false);
    if (REG_PARM_STACK_SPACE (fn) != 0)
      return false;
  }
#endif

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
  {
    CUMULATIVE_ARGS args_so_far;
    tree fn, arg;

    fn = emit_block_move_libcall_fn (false);
    INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);

    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
      {
	enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
	rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
	if (!tmp || !REG_P (tmp))
	  return false;
	if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
					NULL_TREE, 1))
	  return false;
	FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
      }
  }
  return true;
}
/* A subroutine of emit_block_move.  Expand a movmem pattern;
   return true if successful.  */

static bool
emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align)
{
  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  int save_volatile_ok = volatile_ok;
  enum machine_mode mode;

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = movmem_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
	     here because if SIZE is less than the mode mask, as it is
	     returned by the macro, it will definitely be less than the
	     actual mode mask.  */
	  && ((GET_CODE (size) == CONST_INT
	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
		   <= (GET_MODE_MASK (mode) >> 1)))
	      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
	      || (*pred) (x, BLKmode))
	  && ((pred = insn_data[(int) code].operand[1].predicate) == 0
	      || (*pred) (y, BLKmode))
	  && ((pred = insn_data[(int) code].operand[3].predicate) == 0
	      || (*pred) (opalign, VOIDmode)))
	{
	  rtx op2;
	  rtx last = get_last_insn ();
	  rtx pat;

	  op2 = convert_to_mode (mode, size, 1);
	  pred = insn_data[(int) code].operand[2].predicate;
	  if (pred != 0 && ! (*pred) (op2, mode))
	    op2 = copy_to_mode_reg (mode, op2);

	  /* ??? When called via emit_block_move_for_call, it'd be
	     nice if there were some way to inform the backend, so
	     that it doesn't fail the expansion because it thinks
	     emitting the libcall would be more efficient.  */

	  pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	  if (pat)
	    {
	      emit_insn (pat);
	      volatile_ok = save_volatile_ok;
	      return true;
	    }
	  else
	    delete_insns_since (last);
	}
    }

  volatile_ok = save_volatile_ok;
  return false;
}
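/* Note (illustrative): a movmemM pattern receives the destination and
   source as BLKmode MEMs, the byte count as operand 2 in mode M, and
   the shared alignment in bytes as operand 3, matching the operand
   predicates checked above.  */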
/* A subroutine of emit_block_move.  Expand a call to memcpy.
   Return the return value from memcpy, 0 otherwise.  */

static rtx
emit_block_move_via_libcall (rtx dst, rtx src, rtx size)
{
  rtx dst_addr, src_addr;
  tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* Emit code to copy the addresses of DST and SRC and SIZE into new
     pseudos.  We can then place those new pseudos into a VAR_DECL and
     use them later.  */

  dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));

  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  size_mode = TYPE_MODE (sizetype);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  size_tree = make_tree (sizetype, size);

  fn = emit_block_move_libcall_fn (true);
  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
  arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
  arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);

  /* Now we have to build up the CALL_EXPR itself.  */
  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
  call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
		     call_expr, arg_list, NULL_TREE);

  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);

  /* If we are initializing a readonly value, show the above call clobbered
     it.  Otherwise, a load from it may erroneously be hoisted from a loop, or
     the delay slot scheduler might overlook conflicts and take nasty
     decisions.  */
  if (RTX_UNCHANGING_P (dst))
    add_function_usage_to
      (last_call_insn (), gen_rtx_EXPR_LIST (VOIDmode,
					     gen_rtx_CLOBBER (VOIDmode, dst),
					     NULL_RTX));

  return retval;
}
/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_move_fn;

void
init_block_move_fn (const char *asmspec)
{
  if (!block_move_fn)
    {
      tree args, fn;

      fn = get_identifier ("memcpy");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
				       const_ptr_type_node, sizetype,
				       NULL_TREE);

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;

      block_move_fn = fn;
    }

  if (asmspec)
    {
      SET_DECL_RTL (block_move_fn, NULL_RTX);
      SET_DECL_ASSEMBLER_NAME (block_move_fn, get_identifier (asmspec));
    }
}

static tree
emit_block_move_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_move_fn)
    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_move_fn, NULL);
      assemble_external (block_move_fn);
    }

  return block_move_fn;
}
/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

static void
emit_block_move_via_loop (rtx x, rtx y, rtx size,
			  unsigned int align ATTRIBUTE_UNUSED)
{
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (Pmode, iter_mode, iter, true);
  x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
  y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
			     true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
			   true, top_label);
}
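/* The generated loop is equivalent to:

     iter = 0;
     goto cmp;
   top:
     x[iter] = y[iter];
     iter += 1;
   cmp:
     if (iter < size) goto top;  */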
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (int regno, rtx x, int nregs)
{
  int i;

  if (nregs == 0)
    return;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      rtx last = get_last_insn ();
      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
				    GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
	abort ();

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

rtx
gen_group_rtx (rtx orig)
{
  int i, length;
  rtx *tmps;

  if (GET_CODE (orig) != PARALLEL)
    abort ();

  length = XVECLEN (orig, 0);
  tmps = alloca (sizeof (rtx) * length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  if (i)
    tmps[0] = 0;

  for (; i < length; i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
    }

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}
/* Emit code to move a block ORIG_SRC of type TYPE to a block DST,
   where DST is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_SRC in bytes, or -1
   if not known.  */

void
emit_group_load (rtx dst, rtx orig_src, tree type ATTRIBUTE_UNUSED, int ssize)
{
  rtx *tmps, src;
  int start, i;

  if (GET_CODE (dst) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  /* Arrange to shift the fragment to where it belongs.
	     extract_bit_field loads to the lsb of the reg.  */
	  if (
#ifdef BLOCK_REG_PADDING
	      BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
	      == (BYTES_BIG_ENDIAN ? upward : downward)
#else
	      BYTES_BIG_ENDIAN
#endif
	      )
	    shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	  bytelen = ssize - bytepos;
	}

      /* If we won't be loading directly from memory, protect the real source
	 from strange tricks we might play; but make sure that the source can
	 be loaded directly into the destination.  */
      src = orig_src;
      if (!MEM_P (orig_src)
	  && (!CONSTANT_P (orig_src)
	      || (GET_MODE (orig_src) != mode
		  && GET_MODE (orig_src) != VOIDmode)))
	{
	  if (GET_MODE (orig_src) == VOIDmode)
	    src = gen_reg_rtx (mode);
	  else
	    src = gen_reg_rtx (GET_MODE (orig_src));

	  emit_move_insn (src, orig_src);
	}

      /* Optimize the access just a bit.  */
      if (MEM_P (src)
	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
	      || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	{
	  tmps[i] = gen_reg_rtx (mode);
	  emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
	}
      else if (GET_CODE (src) == CONCAT)
	{
	  unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
	  unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));

	  if ((bytepos == 0 && bytelen == slen0)
	      || (bytepos != 0 && bytepos + bytelen <= slen))
	    {
	      /* The following assumes that the concatenated objects all
		 have the same size.  In this case, a simple calculation
		 can be used to determine the object and the bit field
		 to be extracted.  */
	      tmps[i] = XEXP (src, bytepos / slen0);
	      if (! CONSTANT_P (tmps[i])
		  && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
		tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
					     (bytepos % slen0) * BITS_PER_UNIT,
					     1, NULL_RTX, mode, mode);
	    }
	  else if (bytepos == 0)
	    {
	      rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
	      emit_move_insn (mem, src);
	      tmps[i] = adjust_address (mem, mode, 0);
	    }
	  else
	    abort ();
	}
      /* FIXME: A SIMD parallel will eventually lead to a subreg of a
	 SIMD register, which is currently broken.  While we get GCC
	 to emit proper RTL for these cases, let's dump to memory.  */
      else if (VECTOR_MODE_P (GET_MODE (dst))
	       && REG_P (src))
	{
	  int slen = GET_MODE_SIZE (GET_MODE (src));
	  rtx mem;

	  mem = assign_stack_temp (GET_MODE (src), slen, 0);
	  emit_move_insn (mem, src);
	  tmps[i] = adjust_address (mem, mode, (int) bytepos);
	}
      else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
	       && XVECLEN (dst, 0) > 1)
	tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
      else if (CONSTANT_P (src)
	       || (REG_P (src) && GET_MODE (src) == mode))
	tmps[i] = src;
      else
	tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
				     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
				     mode, mode);

      if (shift)
	tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
				build_int_2 (shift, 0), tmps[i], 0);
    }

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
}
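/* Illustration (not from the original source): a PARALLEL such as

     (parallel [(expr_list (reg:DI 3) (const_int 0))
		(expr_list (reg:DI 4) (const_int 8))])

   describes a 16-byte block whose first eight bytes live in hard reg 3
   and whose second eight bytes live in hard reg 4; emit_group_load
   extracts each piece from ORIG_SRC at its recorded byte offset.  */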
/* Emit code to move a block SRC to block DST, where SRC and DST are
   non-consecutive groups of registers, each represented by a PARALLEL.  */

void
emit_group_move (rtx dst, rtx src)
{
  int i;

  if (GET_CODE (src) != PARALLEL
      || GET_CODE (dst) != PARALLEL
      || XVECLEN (src, 0) != XVECLEN (dst, 0))
    abort ();

  /* Skip first entry if NULL.  */
  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
		    XEXP (XVECEXP (src, 0, i), 0));
}
/* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
   where SRC is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_DST, or -1 if not
   known.  */

void
emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
{
  rtx *tmps, dst;
  int start, i;

  if (GET_CODE (src) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = alloca (sizeof (rtx) * XVECLEN (src, 0));

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      tmps[i] = gen_reg_rtx (GET_MODE (reg));
      emit_move_insn (tmps[i], reg);
    }

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
	 a return statement.  In that case, the dst and src are the same,
	 so no action is necessary.  */
      if (rtx_equal_p (dst, src))
	return;

      /* It is unclear if we can ever reach here, but we may as well handle
	 it.  Allocate a temporary, and split this into a store/load to/from
	 the temporary.  */

      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
      emit_group_store (temp, src, type, ssize);
      emit_group_load (dst, temp, type, ssize);
      return;
    }
  else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
    {
      dst = gen_reg_rtx (GET_MODE (orig_dst));
      /* Make life a bit easier for combine.  */
      emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
    }

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);
      rtx dest = dst;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  /* store_bit_field always takes its value from the lsb.
	     Move the fragment to the lsb if it's not already there.  */
	  if (
#ifdef BLOCK_REG_PADDING
	      BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
	      == (BYTES_BIG_ENDIAN ? upward : downward)
#else
	      BYTES_BIG_ENDIAN
#endif
	      )
	    {
	      int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	      tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
				      build_int_2 (shift, 0), tmps[i], 0);
	    }
	  bytelen = ssize - bytepos;
	}

      if (GET_CODE (dst) == CONCAT)
	{
	  if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
	    dest = XEXP (dst, 0);
	  else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
	    {
	      bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
	      dest = XEXP (dst, 1);
	    }
	  else if (bytepos == 0 && XVECLEN (src, 0))
	    {
	      dest = assign_stack_temp (GET_MODE (dest),
					GET_MODE_SIZE (GET_MODE (dest)), 0);
	      emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
			      tmps[i]);
	      dst = dest;
	      break;
	    }
	  else
	    abort ();
	}

      /* Optimize the access just a bit.  */
      if (MEM_P (dest)
	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
	      || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
      else
	store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
			 mode, tmps[i]);
    }

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (orig_dst != dst)
    emit_move_insn (orig_dst, dst);
}
/* Generate code to copy a BLKmode object of TYPE out of a
   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
   is null, a stack temporary is created.  TGTBLK is returned.

   The purpose of this routine is to handle functions that return
   BLKmode structures in registers.  Some machines (the PA for example)
   want to return all small structures in registers regardless of the
   structure's alignment.  */

rtx
copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
{
  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
  rtx src = NULL, dst = NULL;
  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
  unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;

  if (tgtblk == 0)
    {
      tgtblk = assign_temp (build_qualified_type (type,
						  (TYPE_QUALS (type)
						   | TYPE_QUAL_CONST)),
			    0, 1, 1);
      preserve_temp_slots (tgtblk);
    }

  /* This code assumes srcreg is at least a full word.  If it isn't, copy it
     into a new pseudo which is a full word.  */

  if (GET_MODE (srcreg) != BLKmode
      && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
    srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));

  /* If the structure doesn't take up a whole number of words, see whether
     SRCREG is padded on the left or on the right.  If it's on the left,
     set PADDING_CORRECTION to the number of bits to skip.

     In most ABIs, the structure will be returned at the least significant
     end of the register, which translates to right padding on little-endian
     targets and left padding on big-endian targets.  The opposite
     holds if the structure is returned at the most significant
     end of the register.  */
  if (bytes % UNITS_PER_WORD != 0
      && (targetm.calls.return_in_msb (type)
	  ? !BYTES_BIG_ENDIAN
	  : BYTES_BIG_ENDIAN))
    padding_correction
      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));

  /* Copy the structure BITSIZE bits at a time.

     We could probably emit more efficient code for machines which do not use
     strict alignment, but it doesn't seem worth the effort at the current
     time.  */
  for (bitpos = 0, xbitpos = padding_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
    {
      /* We need a new source operand each time xbitpos is on a
	 word boundary and when xbitpos == padding_correction
	 (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0
	  || xbitpos == padding_correction)
	src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
				     GET_MODE (srcreg));

      /* We need a new destination operand each time bitpos is on
	 a word boundary.  */
      if (bitpos % BITS_PER_WORD == 0)
	dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);

      /* Use xbitpos for the source extraction (right justified) and
	 bitpos for the destination store (left justified).  */
      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
		       extract_bit_field (src, bitsize,
					  xbitpos % BITS_PER_WORD, 1,
					  NULL_RTX, word_mode, word_mode));
    }

  return tgtblk;
}
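/* Worked example (illustrative): for a 5-byte struct on a 32-bit
   target where the padding test above is true, padding_correction is
   32 - (5 % 4) * 8 = 24 bits, so the first extraction starts 24 bits
   into SRCREG's first word while the first store still begins at bit 0
   of TGTBLK.  */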
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (rtx *call_fusage, rtx reg)
{
  if (!REG_P (reg)
      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    abort ();

  *call_fusage
    = gen_rtx_EXPR_LIST (VOIDmode,
			 gen_rtx_USE (VOIDmode, reg), *call_fusage);
}
/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (rtx *call_fusage, int regno, int nregs)
{
  int i;

  if (regno + nregs > FIRST_PSEUDO_REGISTER)
    abort ();

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, regno_reg_rtx[regno + i]);
}
/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (rtx *call_fusage, rtx regs)
{
  int i;

  for (i = 0; i < XVECLEN (regs, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
	 registers.  This can also be a MEM for targets that pass values
	 partially on the stack and partially in registers.  */
      if (reg != 0 && REG_P (reg))
	use_reg (call_fusage, reg);
    }
}
/* Determine whether the LEN bytes generated by CONSTFUN can be
   stored to memory using several move instructions.  CONSTFUNDATA is
   a pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  Return nonzero if a
   call to store_by_pieces should succeed.  */

int
can_store_by_pieces (unsigned HOST_WIDE_INT len,
		     rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
		     void *constfundata, unsigned int align)
{
  unsigned HOST_WIDE_INT max_size, l;
  HOST_WIDE_INT offset = 0;
  enum machine_mode mode, tmode;
  enum insn_code icode;
  int reverse;
  rtx cst;

  if (len == 0)
    return 1;

  if (! STORE_BY_PIECES_P (len, align))
    return 0;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* We would first store what we can in the largest integer mode, then go to
     successively smaller modes.  */

  for (reverse = 0;
       reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
       reverse++)
    {
      l = len;
      mode = VOIDmode;
      max_size = STORE_MAX_PIECES + 1;
      while (max_size > 1)
	{
	  for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	       tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	    if (GET_MODE_SIZE (tmode) < max_size)
	      mode = tmode;

	  if (mode == VOIDmode)
	    break;

	  icode = mov_optab->handlers[(int) mode].insn_code;
	  if (icode != CODE_FOR_nothing
	      && align >= GET_MODE_ALIGNMENT (mode))
	    {
	      unsigned int size = GET_MODE_SIZE (mode);

	      while (l >= size)
		{
		  if (reverse)
		    offset -= size;

		  cst = (*constfun) (constfundata, offset, mode);
		  if (!LEGITIMATE_CONSTANT_P (cst))
		    return 0;

		  if (!reverse)
		    offset += size;

		  l -= size;
		}
	    }

	  max_size = GET_MODE_SIZE (mode);
	}

      /* The code above should have handled everything.  */
      if (l != 0)
	abort ();
    }

  return 1;
}
/* Generate several move instructions to store LEN bytes generated by
   CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
   pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.
   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
   stpcpy.  */

rtx
store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
		 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
		 void *constfundata, unsigned int align, int endp)
{
  struct store_by_pieces data;

  if (len == 0)
    {
      if (endp == 2)
	abort ();
      return to;
    }

  if (! STORE_BY_PIECES_P (len, align))
    abort ();
  data.constfun = constfun;
  data.constfundata = constfundata;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
  if (endp)
    {
      rtx to1;

      if (data.reverse)
	abort ();
      if (data.autinc_to)
	{
	  if (endp == 2)
	    {
	      if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
		emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
	      else
		data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
								-1));
	    }
	  to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
					   data.offset);
	}
      else
	{
	  if (endp == 2)
	    --data.offset;
	  to1 = adjust_address (data.to, QImode, data.offset);
	}
      return to1;
    }
  else
    return to;
}
/* Generate several move instructions to clear LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  ALIGN is maximum alignment we can assume.  */

static void
clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
{
  struct store_by_pieces data;

  if (len == 0)
    return;

  data.constfun = clear_by_pieces_1;
  data.constfundata = NULL;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
}

/* Callback routine for clear_by_pieces.
   Return const0_rtx unconditionally.  */

static rtx
clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
		   HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
		   enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return const0_rtx;
}
2146 /* Subroutine of clear_by_pieces and store_by_pieces.
2147 Generate several move instructions to store LEN bytes of block TO. (A MEM
2148 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2151 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2152 unsigned int align ATTRIBUTE_UNUSED)
2154 rtx to_addr = XEXP (data->to, 0);
2155 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
2156 enum machine_mode mode = VOIDmode, tmode;
2157 enum insn_code icode;
2160 data->to_addr = to_addr;
2162 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2163 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2165 data->explicit_inc_to = 0;
2167 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2169 data->offset = data->len;
2171 /* If storing requires more than two move insns,
2172 copy addresses to registers (to make displacements shorter)
2173 and use post-increment if available. */
2174 if (!data->autinc_to
2175 && move_by_pieces_ninsns (data->len, align) > 2)
2177 /* Determine the main mode we'll be using. */
2178 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2179 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2180 if (GET_MODE_SIZE (tmode) < max_size)
2183 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2185 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2186 data->autinc_to = 1;
2187 data->explicit_inc_to = -1;
2190 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2191 && ! data->autinc_to)
2193 data->to_addr = copy_addr_to_reg (to_addr);
2194 data->autinc_to = 1;
2195 data->explicit_inc_to = 1;
2198 if (!data->autinc_to && CONSTANT_P (to_addr))
2199 data->to_addr = copy_addr_to_reg (to_addr);
2202 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2203 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2204 align = MOVE_MAX * BITS_PER_UNIT;
2206 /* First store what we can in the largest integer mode, then go to
2207 successively smaller modes. */
2209 while (max_size > 1)
2211 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2212 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2213 if (GET_MODE_SIZE (tmode) < max_size)
2216 if (mode == VOIDmode)
2219 icode = mov_optab->handlers[(int) mode].insn_code;
2220 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2221 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2223 max_size = GET_MODE_SIZE (mode);
2226 /* The code above should have handled everything. */
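/* Self-contained sketch (hedged) of the greedy decomposition the loop
   above implements when every power-of-two integer mode up to MAX_PIECE
   bytes is available and sufficiently aligned: widest pieces first, then
   the remainder in ever narrower modes.  E.g. LEN == 11 with MAX_PIECE
   == 8 takes three moves (8 + 2 + 1 bytes).  */
#if 0
static unsigned int
example_piece_count (unsigned int len, unsigned int max_piece)
{
  unsigned int n = 0, size;

  for (size = max_piece; size >= 1; size /= 2)
    {
      n += len / size;	/* moves emitted in the SIZE-byte mode */
      len %= size;	/* remainder left for narrower modes */
    }
  return n;
}
#endif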
2231 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2232 with move instructions for mode MODE. GENFUN is the gen_... function
2233 to make a move insn for that mode. DATA has all the other info. */
2236 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2237 struct store_by_pieces *data)
2239 unsigned int size = GET_MODE_SIZE (mode);
2242 while (data->len >= size)
2245 data->offset -= size;
2247 if (data->autinc_to)
2248 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2251 to1 = adjust_address (data->to, mode, data->offset);
2253 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2254 emit_insn (gen_add2_insn (data->to_addr,
2255 GEN_INT (-(HOST_WIDE_INT) size)));
2257 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2258 emit_insn ((*genfun) (to1, cst));
2260 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2261 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2263 if (! data->reverse)
2264 data->offset += size;
2270 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2271 its length in bytes. */
2274 clear_storage (rtx object, rtx size)
2277 unsigned int align = (MEM_P (object) ? MEM_ALIGN (object)
2278 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2280 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2281 just move a zero. Otherwise, do this a piece at a time. */
2282 if (GET_MODE (object) != BLKmode
2283 && GET_CODE (size) == CONST_INT
2284 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
2285 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2288 if (size == const0_rtx)
2290 else if (GET_CODE (size) == CONST_INT
2291 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2292 clear_by_pieces (object, INTVAL (size), align);
2293 else if (clear_storage_via_clrmem (object, size, align))
2296 retval = clear_storage_via_libcall (object, size);
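/* Usage sketch (hedged): zeroing a 64-byte BLKmode stack temporary.
   clear_storage picks clear_by_pieces, a clrmem pattern, or the memset
   libcall below, whichever the target supports for this size.  */
#if 0
static void
example_clear_temp (void)
{
  rtx mem = assign_stack_temp (BLKmode, 64, 0);

  clear_storage (mem, GEN_INT (64));
}
#endif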
2302 /* A subroutine of clear_storage. Expand a clrmem pattern;
2303 return true if successful. */
2306 clear_storage_via_clrmem (rtx object, rtx size, unsigned int align)
2308 /* Try the most limited insn first, because there's no point
2309 including more than one in the machine description unless
2310 the more limited one has some advantage. */
2312 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2313 enum machine_mode mode;
2315 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2316 mode = GET_MODE_WIDER_MODE (mode))
2318 enum insn_code code = clrmem_optab[(int) mode];
2319 insn_operand_predicate_fn pred;
2321 if (code != CODE_FOR_nothing
2322 /* We don't need MODE to be narrower than
2323 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2324 the mode mask, as it is returned by the macro, it will
2325 definitely be less than the actual mode mask. */
2326 && ((GET_CODE (size) == CONST_INT
2327 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2328 <= (GET_MODE_MASK (mode) >> 1)))
2329 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2330 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2331 || (*pred) (object, BLKmode))
2332 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2333 || (*pred) (opalign, VOIDmode)))
2336 rtx last = get_last_insn ();
2339 op1 = convert_to_mode (mode, size, 1);
2340 pred = insn_data[(int) code].operand[1].predicate;
2341 if (pred != 0 && ! (*pred) (op1, mode))
2342 op1 = copy_to_mode_reg (mode, op1);
2344 pat = GEN_FCN ((int) code) (object, op1, opalign);
2351 delete_insns_since (last);
2358 /* A subroutine of clear_storage. Expand a call to memset.
2359 Return the return value of memset, 0 otherwise. */
2362 clear_storage_via_libcall (rtx object, rtx size)
2364 tree call_expr, arg_list, fn, object_tree, size_tree;
2365 enum machine_mode size_mode;
2368 /* Emit code to copy OBJECT and SIZE into new pseudos.  We can then
2369 place those pseudos into a VAR_DECL and use them later.  */
2371 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2373 size_mode = TYPE_MODE (sizetype);
2374 size = convert_to_mode (size_mode, size, 1);
2375 size = copy_to_mode_reg (size_mode, size);
2377 /* It is incorrect to use the libcall calling conventions to call
2378 memset in this context. This could be a user call to memset and
2379 the user may wish to examine the return value from memset. For
2380 targets where libcalls and normal calls have different conventions
2381 for returning pointers, we could end up generating incorrect code. */
2383 object_tree = make_tree (ptr_type_node, object);
2384 size_tree = make_tree (sizetype, size);
2386 fn = clear_storage_libcall_fn (true);
2387 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2388 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
2389 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2391 /* Now we have to build up the CALL_EXPR itself. */
2392 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2393 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2394 call_expr, arg_list, NULL_TREE);
2396 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2398 /* If we are initializing a readonly value, show the above call
2399 clobbered it. Otherwise, a load from it may erroneously be
2400 hoisted from a loop. */
2401 if (RTX_UNCHANGING_P (object))
2402 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
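/* For reference (hedged): the CALL_EXPR assembled above is simply the
   tree for the source-level call
       memset (object, 0, size);
   expanded through the ordinary call path, so the pointer comes back
   under the normal, not the libcall, return convention.  */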
2407 /* A subroutine of clear_storage_via_libcall. Create the tree node
2408 for the function we use for block clears. The first time FOR_CALL
2409 is true, we call assemble_external. */
2411 static GTY(()) tree block_clear_fn;
2414 init_block_clear_fn (const char *asmspec)
2416 if (!block_clear_fn)
2420 fn = get_identifier ("memset");
2421 args = build_function_type_list (ptr_type_node, ptr_type_node,
2422 integer_type_node, sizetype,
2425 fn = build_decl (FUNCTION_DECL, fn, args);
2426 DECL_EXTERNAL (fn) = 1;
2427 TREE_PUBLIC (fn) = 1;
2428 DECL_ARTIFICIAL (fn) = 1;
2429 TREE_NOTHROW (fn) = 1;
2431 block_clear_fn = fn;
2436 SET_DECL_RTL (block_clear_fn, NULL_RTX);
2437 SET_DECL_ASSEMBLER_NAME (block_clear_fn, get_identifier (asmspec));
2442 clear_storage_libcall_fn (int for_call)
2444 static bool emitted_extern;
2446 if (!block_clear_fn)
2447 init_block_clear_fn (NULL);
2449 if (for_call && !emitted_extern)
2451 emitted_extern = true;
2452 make_decl_rtl (block_clear_fn, NULL);
2453 assemble_external (block_clear_fn);
2456 return block_clear_fn;
2459 /* Generate code to copy Y into X.
2460 Both Y and X must have the same mode, except that
2461 Y can be a constant with VOIDmode.
2462 This mode cannot be BLKmode; use emit_block_move for that.
2464 Return the last instruction emitted. */
2467 emit_move_insn (rtx x, rtx y)
2469 enum machine_mode mode = GET_MODE (x);
2470 rtx y_cst = NULL_RTX;
2473 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2479 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
2480 && (last_insn = compress_float_constant (x, y)))
2485 if (!LEGITIMATE_CONSTANT_P (y))
2487 y = force_const_mem (mode, y);
2489 /* If the target's cannot_force_const_mem prevented the spill,
2490 assume that the target's move expanders will also take care
2491 of the non-legitimate constant. */
2497 /* If X or Y are memory references, verify that their addresses are valid
2500 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2501 && ! push_operand (x, GET_MODE (x)))
2503 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2504 x = validize_mem (x);
2507 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2509 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2510 y = validize_mem (y);
2512 if (mode == BLKmode)
2515 last_insn = emit_move_insn_1 (x, y);
2517 if (y_cst && REG_P (x)
2518 && (set = single_set (last_insn)) != NULL_RTX
2519 && SET_DEST (set) == x
2520 && ! rtx_equal_p (y_cst, SET_SRC (set)))
2521 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
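/* Usage sketch (hedged): loading an integer constant into a fresh
   pseudo.  The REG_EQUAL note attached above is what lets later passes
   still see the constant when Y had to be spilled to the constant
   pool.  */
#if 0
static rtx
example_load_constant (void)
{
  rtx reg = gen_reg_rtx (SImode);

  emit_move_insn (reg, GEN_INT (42));
  return reg;
}
#endif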
2526 /* Low level part of emit_move_insn.
2527 Called just like emit_move_insn, but assumes X and Y
2528 are basically valid. */
2531 emit_move_insn_1 (rtx x, rtx y)
2533 enum machine_mode mode = GET_MODE (x);
2534 enum machine_mode submode;
2535 enum mode_class class = GET_MODE_CLASS (mode);
2537 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2540 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2542 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2544 /* Expand complex moves by moving real part and imag part, if possible. */
2545 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2546 && BLKmode != (submode = GET_MODE_INNER (mode))
2547 && (mov_optab->handlers[(int) submode].insn_code
2548 != CODE_FOR_nothing))
2550 /* Don't split destination if it is a stack push. */
2551 int stack = push_operand (x, GET_MODE (x));
2553 #ifdef PUSH_ROUNDING
2554 /* In case we output to the stack, but the size is smaller than the
2555 machine can push exactly, we need to use move instructions. */
2557 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
2558 != GET_MODE_SIZE (submode)))
2561 HOST_WIDE_INT offset1, offset2;
2563 /* Do not use anti_adjust_stack, since we don't want to update
2564 stack_pointer_delta. */
2565 temp = expand_binop (Pmode,
2566 #ifdef STACK_GROWS_DOWNWARD
2574 (GET_MODE_SIZE (GET_MODE (x)))),
2575 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2577 if (temp != stack_pointer_rtx)
2578 emit_move_insn (stack_pointer_rtx, temp);
2580 #ifdef STACK_GROWS_DOWNWARD
2582 offset2 = GET_MODE_SIZE (submode);
2584 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2585 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2586 + GET_MODE_SIZE (submode));
2589 emit_move_insn (change_address (x, submode,
2590 gen_rtx_PLUS (Pmode,
2592 GEN_INT (offset1))),
2593 gen_realpart (submode, y));
2594 emit_move_insn (change_address (x, submode,
2595 gen_rtx_PLUS (Pmode,
2597 GEN_INT (offset2))),
2598 gen_imagpart (submode, y));
2602 /* If this is a stack push, push the highpart first, so it
2603 ends up in argument order.
2605 In that case, change_address is used only to convert
2606 the mode, not to change the address. */
2609 /* Note that the real part always precedes the imag part in memory
2610 regardless of machine's endianness. */
2611 #ifdef STACK_GROWS_DOWNWARD
2612 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2613 gen_imagpart (submode, y));
2614 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2615 gen_realpart (submode, y));
2617 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2618 gen_realpart (submode, y));
2619 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2620 gen_imagpart (submode, y));
2625 rtx realpart_x, realpart_y;
2626 rtx imagpart_x, imagpart_y;
2628 /* If this is a complex value with each part being smaller than a
2629 word, the usual calling sequence will likely pack the pieces into
2630 a single register. Unfortunately, SUBREG of hard registers only
2631 deals in terms of words, so we have a problem converting input
2632 arguments to the CONCAT of two registers that is used elsewhere
2633 for complex values. If this is before reload, we can copy it into
2634 memory and reload. FIXME, we should see about using extract and
2635 insert on integer registers, but complex short and complex char
2636 variables should be rarely used. */
2637 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2638 && (reload_in_progress | reload_completed) == 0)
2641 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2643 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2645 if (packed_dest_p || packed_src_p)
2647 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2648 ? MODE_FLOAT : MODE_INT);
2650 enum machine_mode reg_mode
2651 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2653 if (reg_mode != BLKmode)
2655 rtx mem = assign_stack_temp (reg_mode,
2656 GET_MODE_SIZE (mode), 0);
2657 rtx cmem = adjust_address (mem, mode, 0);
2661 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2663 emit_move_insn_1 (cmem, y);
2664 return emit_move_insn_1 (sreg, mem);
2668 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2670 emit_move_insn_1 (mem, sreg);
2671 return emit_move_insn_1 (x, cmem);
2677 realpart_x = gen_realpart (submode, x);
2678 realpart_y = gen_realpart (submode, y);
2679 imagpart_x = gen_imagpart (submode, x);
2680 imagpart_y = gen_imagpart (submode, y);
2682 /* Show the output dies here. This is necessary for SUBREGs
2683 of pseudos since we cannot track their lifetimes correctly;
2684 hard regs shouldn't appear here except as return values.
2685 We never want to emit such a clobber after reload. */
2687 && ! (reload_in_progress || reload_completed)
2688 && (GET_CODE (realpart_x) == SUBREG
2689 || GET_CODE (imagpart_x) == SUBREG))
2690 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2692 emit_move_insn (realpart_x, realpart_y);
2693 emit_move_insn (imagpart_x, imagpart_y);
2696 return get_last_insn ();
2699 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
2700 find a mode to do it in. If we have a movcc, use it. Otherwise,
2701 find the MODE_INT mode of the same width. */
2702 else if (GET_MODE_CLASS (mode) == MODE_CC
2703 && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
2705 enum insn_code insn_code;
2706 enum machine_mode tmode = VOIDmode;
2710 && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
2713 for (tmode = QImode; tmode != VOIDmode;
2714 tmode = GET_MODE_WIDER_MODE (tmode))
2715 if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
2718 if (tmode == VOIDmode)
2721 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
2722 may call change_address which is not appropriate if we were
2723 called when a reload was in progress. We don't have to worry
2724 about changing the address since the size in bytes is supposed to
2725 be the same. Copy the MEM to change the mode and move any
2726 substitutions from the old MEM to the new one. */
2728 if (reload_in_progress)
2730 x = gen_lowpart_common (tmode, x1);
2731 if (x == 0 && MEM_P (x1))
2733 x = adjust_address_nv (x1, tmode, 0);
2734 copy_replacements (x1, x);
2737 y = gen_lowpart_common (tmode, y1);
2738 if (y == 0 && MEM_P (y1))
2740 y = adjust_address_nv (y1, tmode, 0);
2741 copy_replacements (y1, y);
2746 x = gen_lowpart (tmode, x);
2747 y = gen_lowpart (tmode, y);
2750 insn_code = mov_optab->handlers[(int) tmode].insn_code;
2751 return emit_insn (GEN_FCN (insn_code) (x, y));
2754 /* Try using a move pattern for the corresponding integer mode. This is
2755 only safe when simplify_subreg can convert MODE constants into integer
2756 constants. At present, it can only do this reliably if the value
2757 fits within a HOST_WIDE_INT. */
2758 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
2759 && (submode = int_mode_for_mode (mode)) != BLKmode
2760 && mov_optab->handlers[submode].insn_code != CODE_FOR_nothing)
2761 return emit_insn (GEN_FCN (mov_optab->handlers[submode].insn_code)
2762 (simplify_gen_subreg (submode, x, mode, 0),
2763 simplify_gen_subreg (submode, y, mode, 0)));
2765 /* This will handle any multi-word or full-word mode that lacks a move_insn
2766 pattern. However, you will get better code if you define such patterns,
2767 even if they must turn into multiple assembler instructions. */
2768 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
2775 #ifdef PUSH_ROUNDING
2777 /* If X is a push on the stack, do the push now and replace
2778 X with a reference to the stack pointer. */
2779 if (push_operand (x, GET_MODE (x)))
2784 /* Do not use anti_adjust_stack, since we don't want to update
2785 stack_pointer_delta. */
2786 temp = expand_binop (Pmode,
2787 #ifdef STACK_GROWS_DOWNWARD
2795 (GET_MODE_SIZE (GET_MODE (x)))),
2796 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2798 if (temp != stack_pointer_rtx)
2799 emit_move_insn (stack_pointer_rtx, temp);
2801 code = GET_CODE (XEXP (x, 0));
2803 /* Just hope that small offsets off SP are OK. */
2804 if (code == POST_INC)
2805 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
2806 GEN_INT (-((HOST_WIDE_INT)
2807 GET_MODE_SIZE (GET_MODE (x)))));
2808 else if (code == POST_DEC)
2809 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
2810 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2812 temp = stack_pointer_rtx;
2814 x = change_address (x, VOIDmode, temp);
2818 /* If we are in reload, see if either operand is a MEM whose address
2819 is scheduled for replacement. */
2820 if (reload_in_progress && MEM_P (x)
2821 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
2822 x = replace_equiv_address_nv (x, inner);
2823 if (reload_in_progress && MEM_P (y)
2824 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
2825 y = replace_equiv_address_nv (y, inner);
2831 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2834 rtx xpart = operand_subword (x, i, 1, mode);
2835 rtx ypart = operand_subword (y, i, 1, mode);
2837 /* If we can't get a part of Y, put Y into memory if it is a
2838 constant. Otherwise, force it into a register. If we still
2839 can't get a part of Y, abort. */
2840 if (ypart == 0 && CONSTANT_P (y))
2842 y = force_const_mem (mode, y);
2843 ypart = operand_subword (y, i, 1, mode);
2845 else if (ypart == 0)
2846 ypart = operand_subword_force (y, i, mode);
2848 if (xpart == 0 || ypart == 0)
2851 need_clobber |= (GET_CODE (xpart) == SUBREG);
2853 last_insn = emit_move_insn (xpart, ypart);
2859 /* Show the output dies here. This is necessary for SUBREGs
2860 of pseudos since we cannot track their lifetimes correctly;
2861 hard regs shouldn't appear here except as return values.
2862 We never want to emit such a clobber after reload. */
2864 && ! (reload_in_progress || reload_completed)
2865 && need_clobber != 0)
2866 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2876 /* If Y is representable exactly in a narrower mode, and the target can
2877 perform the extension directly from constant or memory, then emit the
2878 move as an extension. */
2881 compress_float_constant (rtx x, rtx y)
2883 enum machine_mode dstmode = GET_MODE (x);
2884 enum machine_mode orig_srcmode = GET_MODE (y);
2885 enum machine_mode srcmode;
2888 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
2890 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
2891 srcmode != orig_srcmode;
2892 srcmode = GET_MODE_WIDER_MODE (srcmode))
2895 rtx trunc_y, last_insn;
2897 /* Skip if the target can't extend this way. */
2898 ic = can_extend_p (dstmode, srcmode, 0);
2899 if (ic == CODE_FOR_nothing)
2902 /* Skip if the narrowed value isn't exact. */
2903 if (! exact_real_truncate (srcmode, &r))
2906 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
2908 if (LEGITIMATE_CONSTANT_P (trunc_y))
2910 /* Skip if the target needs extra instructions to perform
2912 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
2915 else if (float_extend_from_mem[dstmode][srcmode])
2916 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
2920 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
2921 last_insn = get_last_insn ();
2924 set_unique_reg_note (last_insn, REG_EQUAL, y);
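/* Worked example (hedged): on a target with an extendsfdf2 pattern, a
   DFmode move of the constant 1.0 truncates exactly to SFmode, so
   instead of loading a DFmode pool entry we emit roughly
       (set (reg:DF d) (float_extend:DF (mem/u:SF <pool: 1.0f>)))
   with a REG_EQUAL note recording the original DFmode 1.0.  */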
2932 /* Pushing data onto the stack. */
2934 /* Push a block of length SIZE (perhaps variable)
2935 and return an rtx to address the beginning of the block.
2936 The value may be virtual_outgoing_args_rtx.
2938 EXTRA is the number of bytes of padding to push in addition to SIZE.
2939 BELOW nonzero means this padding comes at low addresses;
2940 otherwise, the padding comes at high addresses. */
2943 push_block (rtx size, int extra, int below)
2947 size = convert_modes (Pmode, ptr_mode, size, 1);
2948 if (CONSTANT_P (size))
2949 anti_adjust_stack (plus_constant (size, extra));
2950 else if (REG_P (size) && extra == 0)
2951 anti_adjust_stack (size);
2954 temp = copy_to_mode_reg (Pmode, size);
2956 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2957 temp, 0, OPTAB_LIB_WIDEN);
2958 anti_adjust_stack (temp);
2961 #ifndef STACK_GROWS_DOWNWARD
2967 temp = virtual_outgoing_args_rtx;
2968 if (extra != 0 && below)
2969 temp = plus_constant (temp, extra);
2973 if (GET_CODE (size) == CONST_INT)
2974 temp = plus_constant (virtual_outgoing_args_rtx,
2975 -INTVAL (size) - (below ? 0 : extra));
2976 else if (extra != 0 && !below)
2977 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2978 negate_rtx (Pmode, plus_constant (size, extra)));
2980 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2981 negate_rtx (Pmode, size));
2984 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2987 #ifdef PUSH_ROUNDING
2989 /* Emit single push insn. */
2992 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
2995 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
2997 enum insn_code icode;
2998 insn_operand_predicate_fn pred;
3000 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3001 /* If there is a push pattern, use it.  Otherwise try the old way of
3002 throwing a MEM representing a push operation at the move expander.  */
3003 icode = push_optab->handlers[(int) mode].insn_code;
3004 if (icode != CODE_FOR_nothing)
3006 if (((pred = insn_data[(int) icode].operand[0].predicate)
3007 && !((*pred) (x, mode))))
3008 x = force_reg (mode, x);
3009 emit_insn (GEN_FCN (icode) (x));
3012 if (GET_MODE_SIZE (mode) == rounded_size)
3013 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3014 /* If we are to pad downward, adjust the stack pointer first and
3015 then store X into the stack location using an offset. This is
3016 because emit_move_insn does not know how to pad; it does not have
3018 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3020 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3021 HOST_WIDE_INT offset;
3023 emit_move_insn (stack_pointer_rtx,
3024 expand_binop (Pmode,
3025 #ifdef STACK_GROWS_DOWNWARD
3031 GEN_INT (rounded_size),
3032 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3034 offset = (HOST_WIDE_INT) padding_size;
3035 #ifdef STACK_GROWS_DOWNWARD
3036 if (STACK_PUSH_CODE == POST_DEC)
3037 /* We have already decremented the stack pointer, so get the
3039 offset += (HOST_WIDE_INT) rounded_size;
3041 if (STACK_PUSH_CODE == POST_INC)
3042 /* We have already incremented the stack pointer, so get the
3044 offset -= (HOST_WIDE_INT) rounded_size;
3046 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3050 #ifdef STACK_GROWS_DOWNWARD
3051 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3052 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3053 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3055 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3056 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3057 GEN_INT (rounded_size));
3059 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3062 dest = gen_rtx_MEM (mode, dest_addr);
3066 set_mem_attributes (dest, type, 1);
3068 if (flag_optimize_sibling_calls)
3069 /* Function incoming arguments may overlap with sibling call
3070 outgoing arguments and we cannot allow reordering of reads
3071 from function arguments with stores to outgoing arguments
3072 of sibling calls. */
3073 set_mem_alias_set (dest, 0);
3075 emit_move_insn (dest, x);
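/* Worked example (hedged): with STACK_PUSH_CODE == PRE_DEC and no
   padding, pushing a SImode value X reduces to the single insn
       (set (mem:SI (pre_dec (reg sp))) X)
   which is exactly the auto-modified MEM handed to the move expander
   above when the target has no push pattern.  */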
3079 /* Generate code to push X onto the stack, assuming it has mode MODE and
3081 MODE is redundant except when X is a CONST_INT (since they don't
3083 SIZE is an rtx for the size of data to be copied (in bytes),
3084 needed only if X is BLKmode.
3086 ALIGN (in bits) is maximum alignment we can assume.
3088 If PARTIAL and REG are both nonzero, then copy that many of the first
3089 words of X into registers starting with REG, and push the rest of X.
3090 The amount of space pushed is decreased by PARTIAL words,
3091 rounded *down* to a multiple of PARM_BOUNDARY.
3092 REG must be a hard register in this case.
3093 If REG is zero but PARTIAL is not, take all other actions for an
3094 argument partially in registers, but do not actually load any
3097 EXTRA is the amount in bytes of extra space to leave next to this arg.
3098 This is ignored if an argument block has already been allocated.
3100 On a machine that lacks real push insns, ARGS_ADDR is the address of
3101 the bottom of the argument block for this call. We use indexing off there
3102 to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
3103 argument block has not been preallocated.
3105 ARGS_SO_FAR is the size of args previously pushed for this call.
3107 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3108 for arguments passed in registers. If nonzero, it will be the number
3109 of bytes required. */
3112 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3113 unsigned int align, int partial, rtx reg, int extra,
3114 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3118 enum direction stack_direction
3119 #ifdef STACK_GROWS_DOWNWARD
3125 /* Decide where to pad the argument: `downward' for below,
3126 `upward' for above, or `none' for don't pad it.
3127 Default is below for small data on big-endian machines; else above. */
3128 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3130 /* Invert direction if stack is post-decrement.
3132 if (STACK_PUSH_CODE == POST_DEC)
3133 if (where_pad != none)
3134 where_pad = (where_pad == downward ? upward : downward);
3138 if (mode == BLKmode)
3140 /* Copy a block into the stack, entirely or partially. */
3143 int used = partial * UNITS_PER_WORD;
3147 if (reg && GET_CODE (reg) == PARALLEL)
3149 /* Use the size of the elt to compute offset. */
3150 rtx elt = XEXP (XVECEXP (reg, 0, 0), 0);
3151 used = partial * GET_MODE_SIZE (GET_MODE (elt));
3152 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3155 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3162 /* USED is now the # of bytes we need not copy to the stack
3163 because registers will take care of them. */
3166 xinner = adjust_address (xinner, BLKmode, used);
3168 /* If the partial register-part of the arg counts in its stack size,
3169 skip the part of stack space corresponding to the registers.
3170 Otherwise, start copying to the beginning of the stack space,
3171 by setting SKIP to 0. */
3172 skip = (reg_parm_stack_space == 0) ? 0 : used;
3174 #ifdef PUSH_ROUNDING
3175 /* Do it with several push insns if that doesn't take lots of insns
3176 and if there is no difficulty with push insns that skip bytes
3177 on the stack for alignment purposes. */
3180 && GET_CODE (size) == CONST_INT
3182 && MEM_ALIGN (xinner) >= align
3183 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3184 /* Here we avoid the case of a structure whose weak alignment
3185 forces many pushes of a small amount of data,
3186 and such small pushes do rounding that causes trouble. */
3187 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3188 || align >= BIGGEST_ALIGNMENT
3189 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3190 == (align / BITS_PER_UNIT)))
3191 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3193 /* Push padding now if padding above and stack grows down,
3194 or if padding below and stack grows up.
3195 But if space already allocated, this has already been done. */
3196 if (extra && args_addr == 0
3197 && where_pad != none && where_pad != stack_direction)
3198 anti_adjust_stack (GEN_INT (extra));
3200 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3203 #endif /* PUSH_ROUNDING */
3207 /* Otherwise make space on the stack and copy the data
3208 to the address of that space. */
3210 /* Deduct words put into registers from the size we must copy. */
3213 if (GET_CODE (size) == CONST_INT)
3214 size = GEN_INT (INTVAL (size) - used);
3216 size = expand_binop (GET_MODE (size), sub_optab, size,
3217 GEN_INT (used), NULL_RTX, 0,
3221 /* Get the address of the stack space.
3222 In this case, we do not deal with EXTRA separately.
3223 A single stack adjust will do. */
3226 temp = push_block (size, extra, where_pad == downward);
3229 else if (GET_CODE (args_so_far) == CONST_INT)
3230 temp = memory_address (BLKmode,
3231 plus_constant (args_addr,
3232 skip + INTVAL (args_so_far)));
3234 temp = memory_address (BLKmode,
3235 plus_constant (gen_rtx_PLUS (Pmode,
3240 if (!ACCUMULATE_OUTGOING_ARGS)
3242 /* If the source is referenced relative to the stack pointer,
3243 copy it to another register to stabilize it. We do not need
3244 to do this if we know that we won't be changing sp. */
3246 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3247 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3248 temp = copy_to_reg (temp);
3251 target = gen_rtx_MEM (BLKmode, temp);
3255 set_mem_attributes (target, type, 1);
3256 /* Function incoming arguments may overlap with sibling call
3257 outgoing arguments and we cannot allow reordering of reads
3258 from function arguments with stores to outgoing arguments
3259 of sibling calls. */
3260 set_mem_alias_set (target, 0);
3263 /* ALIGN may well be stricter than TYPE's alignment, e.g. due to
3264 PARM_BOUNDARY.  Assume the caller isn't lying.  */
3265 set_mem_align (target, align);
3267 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3270 else if (partial > 0)
3272 /* Scalar partly in registers. */
3274 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3277 /* # words of start of argument
3278 that we must make space for but need not store. */
3279 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3280 int args_offset = INTVAL (args_so_far);
3283 /* Push padding now if padding above and stack grows down,
3284 or if padding below and stack grows up.
3285 But if space already allocated, this has already been done. */
3286 if (extra && args_addr == 0
3287 && where_pad != none && where_pad != stack_direction)
3288 anti_adjust_stack (GEN_INT (extra));
3290 /* If we make space by pushing it, we might as well push
3291 the real data. Otherwise, we can leave OFFSET nonzero
3292 and leave the space uninitialized. */
3296 /* Now NOT_STACK gets the number of words that we don't need to
3297 allocate on the stack. */
3298 not_stack = partial - offset;
3300 /* If the partial register-part of the arg counts in its stack size,
3301 skip the part of stack space corresponding to the registers.
3302 Otherwise, start copying to the beginning of the stack space,
3303 by setting SKIP to 0. */
3304 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3306 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3307 x = validize_mem (force_const_mem (mode, x));
3309 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3310 SUBREGs of such registers are not allowed. */
3311 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3312 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3313 x = copy_to_reg (x);
3315 /* Loop over all the words allocated on the stack for this arg. */
3316 /* We can do it by words, because any scalar bigger than a word
3317 has a size a multiple of a word. */
3318 #ifndef PUSH_ARGS_REVERSED
3319 for (i = not_stack; i < size; i++)
3321 for (i = size - 1; i >= not_stack; i--)
3323 if (i >= not_stack + offset)
3324 emit_push_insn (operand_subword_force (x, i, mode),
3325 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3327 GEN_INT (args_offset + ((i - not_stack + skip)
3329 reg_parm_stack_space, alignment_pad);
3336 /* Push padding now if padding above and stack grows down,
3337 or if padding below and stack grows up.
3338 But if space already allocated, this has already been done. */
3339 if (extra && args_addr == 0
3340 && where_pad != none && where_pad != stack_direction)
3341 anti_adjust_stack (GEN_INT (extra));
3343 #ifdef PUSH_ROUNDING
3344 if (args_addr == 0 && PUSH_ARGS)
3345 emit_single_push_insn (mode, x, type);
3349 if (GET_CODE (args_so_far) == CONST_INT)
3351 = memory_address (mode,
3352 plus_constant (args_addr,
3353 INTVAL (args_so_far)));
3355 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3357 dest = gen_rtx_MEM (mode, addr);
3360 set_mem_attributes (dest, type, 1);
3361 /* Function incoming arguments may overlap with sibling call
3362 outgoing arguments and we cannot allow reordering of reads
3363 from function arguments with stores to outgoing arguments
3364 of sibling calls. */
3365 set_mem_alias_set (dest, 0);
3368 emit_move_insn (dest, x);
3372 /* If part should go in registers, copy that part
3373 into the appropriate registers. Do this now, at the end,
3374 since mem-to-mem copies above may do function calls. */
3375 if (partial > 0 && reg != 0)
3377 /* Handle calls that pass values in multiple non-contiguous locations.
3378 The Irix 6 ABI has examples of this. */
3379 if (GET_CODE (reg) == PARALLEL)
3380 emit_group_load (reg, x, type, -1);
3382 move_block_to_reg (REGNO (reg), x, partial, mode);
3385 if (extra && args_addr == 0 && where_pad == stack_direction)
3386 anti_adjust_stack (GEN_INT (extra));
3388 if (alignment_pad && args_addr == 0)
3389 anti_adjust_stack (alignment_pad);
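/* Usage sketch (hedged): pushing one SImode scalar entirely on the
   stack -- no partial-register split, no preallocated argument block,
   no extra padding; ARGS_SO_FAR is const0_rtx for the first argument
   of the call.  */
#if 0
static void
example_push_first_arg (void)
{
  emit_push_insn (GEN_INT (42), SImode, NULL_TREE, NULL_RTX,
		  BITS_PER_WORD, 0, NULL_RTX, 0, NULL_RTX,
		  const0_rtx, 0, NULL_RTX);
}
#endif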
3392 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3396 get_subtarget (rtx x)
3399 /* Only registers can be subtargets. */
3401 /* If the register is readonly, it can't be set more than once. */
3402 || RTX_UNCHANGING_P (x)
3403 /* Don't use hard regs to avoid extending their life. */
3404 || REGNO (x) < FIRST_PSEUDO_REGISTER
3405 /* Avoid subtargets inside loops,
3406 since they hide some invariant expressions. */
3407 || preserve_subexpressions_p ())
3411 /* Expand an assignment that stores the value of FROM into TO.
3412 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3413 (If the value is constant, this rtx is a constant.)
3414 Otherwise, the returned value is NULL_RTX. */
3417 expand_assignment (tree to, tree from, int want_value)
3422 /* Don't crash if the lhs of the assignment was erroneous. */
3424 if (TREE_CODE (to) == ERROR_MARK)
3426 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3427 return want_value ? result : NULL_RTX;
3430 /* Assignment of a structure component needs special treatment
3431 if the structure component's rtx is not simply a MEM.
3432 Assignment of an array element at a constant index, and assignment of
3433 an array element in an unaligned packed structure field, has the same
3436 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3437 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
3438 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
3440 enum machine_mode mode1;
3441 HOST_WIDE_INT bitsize, bitpos;
3449 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3450 &unsignedp, &volatilep);
3452 /* If we are going to use store_bit_field and extract_bit_field,
3453 make sure to_rtx will be safe for multiple use. */
3455 if (mode1 == VOIDmode && want_value)
3456 tem = stabilize_reference (tem);
3458 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3462 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
3464 if (!MEM_P (to_rtx))
3467 #ifdef POINTERS_EXTEND_UNSIGNED
3468 if (GET_MODE (offset_rtx) != Pmode)
3469 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
3471 if (GET_MODE (offset_rtx) != ptr_mode)
3472 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3475 /* A constant address in TO_RTX can have VOIDmode; we must not
3476 call force_reg in that case, so avoid it.  */
3478 && GET_MODE (to_rtx) == BLKmode
3479 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3481 && (bitpos % bitsize) == 0
3482 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3483 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3485 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3489 to_rtx = offset_address (to_rtx, offset_rtx,
3490 highest_pow2_factor_for_target (to,
3496 /* If the field is at offset zero, we could have been given the
3497 DECL_RTX of the parent struct. Don't munge it. */
3498 to_rtx = shallow_copy_rtx (to_rtx);
3500 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
3503 /* Deal with volatile and readonly fields. The former is only done
3504 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3505 if (volatilep && MEM_P (to_rtx))
3507 if (to_rtx == orig_to_rtx)
3508 to_rtx = copy_rtx (to_rtx);
3509 MEM_VOLATILE_P (to_rtx) = 1;
3512 if (TREE_CODE (to) == COMPONENT_REF
3513 && TREE_READONLY (TREE_OPERAND (to, 1))
3514 /* We can't assert that a MEM won't be set more than once
3515 if the component is not addressable because another
3516 non-addressable component may be referenced by the same MEM. */
3517 && ! (MEM_P (to_rtx) && ! can_address_p (to)))
3519 if (to_rtx == orig_to_rtx)
3520 to_rtx = copy_rtx (to_rtx);
3521 RTX_UNCHANGING_P (to_rtx) = 1;
3524 if (MEM_P (to_rtx) && ! can_address_p (to))
3526 if (to_rtx == orig_to_rtx)
3527 to_rtx = copy_rtx (to_rtx);
3528 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3531 /* Disabled temporarily. GET_MODE (to_rtx) is often not the right
3533 while (0 && mode1 == VOIDmode && !want_value
3534 && bitpos + bitsize <= BITS_PER_WORD
3535 && bitsize < BITS_PER_WORD
3536 && GET_MODE_BITSIZE (GET_MODE (to_rtx)) <= BITS_PER_WORD
3537 && !TREE_SIDE_EFFECTS (to)
3538 && !TREE_THIS_VOLATILE (to))
3542 HOST_WIDE_INT count = bitpos;
3547 if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE
3548 || TREE_CODE_CLASS (TREE_CODE (src)) != '2')
3551 op0 = TREE_OPERAND (src, 0);
3552 op1 = TREE_OPERAND (src, 1);
3555 if (! operand_equal_p (to, op0, 0))
3558 if (BYTES_BIG_ENDIAN)
3559 count = GET_MODE_BITSIZE (GET_MODE (to_rtx)) - bitpos - bitsize;
3561 /* Special case some bitfield op= exp. */
3562 switch (TREE_CODE (src))
3569 /* For now, just optimize the case of the topmost bitfield
3570 where we don't need to do any masking and also
3571 1 bit bitfields where xor can be used.
3572 We might win by one instruction for the other bitfields
3573 too if insv/extv instructions aren't used, so that
3574 can be added later. */
3575 if (count + bitsize != GET_MODE_BITSIZE (GET_MODE (to_rtx))
3576 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
3578 value = expand_expr (op1, NULL_RTX, VOIDmode, 0);
3579 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
3581 && count + bitsize != GET_MODE_BITSIZE (GET_MODE (to_rtx)))
3583 value = expand_and (GET_MODE (to_rtx), value, const1_rtx,
3587 value = expand_shift (LSHIFT_EXPR, GET_MODE (to_rtx),
3588 value, build_int_2 (count, 0),
3590 result = expand_binop (GET_MODE (to_rtx), binop, to_rtx,
3591 value, to_rtx, 1, OPTAB_WIDEN);
3592 if (result != to_rtx)
3593 emit_move_insn (to_rtx, result);
3604 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3606 /* Spurious cast for HPUX compiler. */
3607 ? ((enum machine_mode)
3608 TYPE_MODE (TREE_TYPE (to)))
3610 unsignedp, TREE_TYPE (tem), get_alias_set (to));
3612 preserve_temp_slots (result);
3616 /* If the value is meaningful, convert RESULT to the proper mode.
3617 Otherwise, return nothing. */
3618 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3619 TYPE_MODE (TREE_TYPE (from)),
3621 TYPE_UNSIGNED (TREE_TYPE (to)))
3625 /* If the rhs is a function call and its value is not an aggregate,
3626 call the function before we start to compute the lhs.
3627 This is needed for correct code for cases such as
3628 val = setjmp (buf) on machines where reference to val
3629 requires loading up part of an address in a separate insn.
3631 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3632 since it might be a promoted variable where the zero- or sign-extension
3633 needs to be done. Handling this in the normal way is safe because no
3634 computation is done before the call. */
3635 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
3636 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3637 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3638 && REG_P (DECL_RTL (to))))
3643 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3645 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3647 /* Handle calls that return values in multiple non-contiguous locations.
3648 The Irix 6 ABI has examples of this. */
3649 if (GET_CODE (to_rtx) == PARALLEL)
3650 emit_group_load (to_rtx, value, TREE_TYPE (from),
3651 int_size_in_bytes (TREE_TYPE (from)));
3652 else if (GET_MODE (to_rtx) == BLKmode)
3653 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
3656 if (POINTER_TYPE_P (TREE_TYPE (to)))
3657 value = convert_memory_address (GET_MODE (to_rtx), value);
3658 emit_move_insn (to_rtx, value);
3660 preserve_temp_slots (to_rtx);
3663 return want_value ? to_rtx : NULL_RTX;
3666 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3667 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3670 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3672 /* Don't move directly into a return register. */
3673 if (TREE_CODE (to) == RESULT_DECL
3674 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
3679 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3681 if (GET_CODE (to_rtx) == PARALLEL)
3682 emit_group_load (to_rtx, temp, TREE_TYPE (from),
3683 int_size_in_bytes (TREE_TYPE (from)));
3685 emit_move_insn (to_rtx, temp);
3687 preserve_temp_slots (to_rtx);
3690 return want_value ? to_rtx : NULL_RTX;
3693 /* In case we are returning the contents of an object which overlaps
3694 the place the value is being stored, use a safe function when copying
3695 a value through a pointer into a structure value return block. */
3696 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3697 && current_function_returns_struct
3698 && !current_function_returns_pcc_struct)
3703 size = expr_size (from);
3704 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
3706 emit_library_call (memmove_libfunc, LCT_NORMAL,
3707 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3708 XEXP (from_rtx, 0), Pmode,
3709 convert_to_mode (TYPE_MODE (sizetype),
3710 size, TYPE_UNSIGNED (sizetype)),
3711 TYPE_MODE (sizetype));
3713 preserve_temp_slots (to_rtx);
3716 return want_value ? to_rtx : NULL_RTX;
3719 /* Compute FROM and store the value in the rtx we got. */
3722 result = store_expr (from, to_rtx, want_value);
3723 preserve_temp_slots (result);
3726 return want_value ? result : NULL_RTX;
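/* Usage sketch (hedged): a front end expanding the statement `a = b'
   hands the operands of its MODIFY_EXPR straight to expand_assignment,
   with WANT_VALUE zero in statement context.  */
#if 0
static void
example_expand_modify_stmt (tree modify_expr)
{
  expand_assignment (TREE_OPERAND (modify_expr, 0),
		     TREE_OPERAND (modify_expr, 1),
		     /*want_value=*/0);
}
#endif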
3729 /* Generate code for computing expression EXP,
3730 and storing the value into TARGET.
3732 If WANT_VALUE & 1 is nonzero, return a copy of the value
3733 not in TARGET, so that we can be sure to use the proper
3734 value in a containing expression even if TARGET has something
3735 else stored in it. If possible, we copy the value through a pseudo
3736 and return that pseudo. Or, if the value is constant, we try to
3737 return the constant. In some cases, we return a pseudo
3738 copied *from* TARGET.
3740 If the mode is BLKmode then we may return TARGET itself.
3741 It turns out that in BLKmode it doesn't cause a problem,
3742 because C has no operators that could combine two different
3743 assignments into the same BLKmode object with different values
3744 with no sequence point. Will other languages need this to
3747 If WANT_VALUE & 1 is 0, we return NULL, to make sure
3748 to catch quickly any cases where the caller uses the value
3749 and fails to set WANT_VALUE.
3751 If WANT_VALUE & 2 is set, this is a store into a call param on the
3752 stack, and block moves may need to be treated specially. */
3755 store_expr (tree exp, rtx target, int want_value)
3758 rtx alt_rtl = NULL_RTX;
3759 int dont_return_target = 0;
3760 int dont_store_target = 0;
3762 if (VOID_TYPE_P (TREE_TYPE (exp)))
3764 /* C++ can generate ?: expressions with a throw expression in one
3765 branch and an rvalue in the other. Here, we resolve attempts to
3766 store the throw expression's nonexistent result. */
3769 expand_expr (exp, const0_rtx, VOIDmode, 0);
3772 if (TREE_CODE (exp) == COMPOUND_EXPR)
3774 /* Perform first part of compound expression, then assign from second
3776 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
3777 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
3778 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3780 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3782 /* For conditional expression, get safe form of the target. Then
3783 test the condition, doing the appropriate assignment on either
3784 side. This avoids the creation of unnecessary temporaries.
3785 For non-BLKmode, it is more efficient not to do this. */
3787 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3789 do_pending_stack_adjust ();
3791 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3792 store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
3793 emit_jump_insn (gen_jump (lab2));
3796 store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
3800 return want_value & 1 ? target : NULL_RTX;
3802 else if ((want_value & 1) != 0
3804 && ! MEM_VOLATILE_P (target)
3805 && GET_MODE (target) != BLKmode)
3806 /* If target is in memory and caller wants value in a register instead,
3807 arrange that. Pass TARGET as target for expand_expr so that,
3808 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3809 We know expand_expr will not use the target in that case.
3810 Don't do this if TARGET is volatile because we are supposed
3811 to write it and then read it. */
3813 temp = expand_expr (exp, target, GET_MODE (target),
3814 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
3815 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3817 /* If TEMP is already in the desired TARGET, only copy it from
3818 memory and don't store it there again. */
3820 || (rtx_equal_p (temp, target)
3821 && ! side_effects_p (temp) && ! side_effects_p (target)))
3822 dont_store_target = 1;
3823 temp = copy_to_reg (temp);
3825 dont_return_target = 1;
3827 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3828 /* If this is a scalar in a register that is stored in a wider mode
3829 than the declared mode, compute the result into its declared mode
3830 and then convert to the wider mode. Our value is the computed
3833 rtx inner_target = 0;
3835 /* If we don't want a value, we can do the conversion inside EXP,
3836 which will often result in some optimizations. Do the conversion
3837 in two steps: first change the signedness, if needed, then
3838 the extend. But don't do this if the type of EXP is a subtype
3839 of something else since then the conversion might involve
3840 more than just converting modes. */
3841 if ((want_value & 1) == 0
3842 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3843 && TREE_TYPE (TREE_TYPE (exp)) == 0
3844 && (!lang_hooks.reduce_bit_field_operations
3845 || (GET_MODE_PRECISION (GET_MODE (target))
3846 == TYPE_PRECISION (TREE_TYPE (exp)))))
3848 if (TYPE_UNSIGNED (TREE_TYPE (exp))
3849 != SUBREG_PROMOTED_UNSIGNED_P (target))
3851 (lang_hooks.types.signed_or_unsigned_type
3852 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
3854 exp = convert (lang_hooks.types.type_for_mode
3855 (GET_MODE (SUBREG_REG (target)),
3856 SUBREG_PROMOTED_UNSIGNED_P (target)),
3859 inner_target = SUBREG_REG (target);
3862 temp = expand_expr (exp, inner_target, VOIDmode,
3863 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
3865 /* If TEMP is a MEM and we want a result value, make the access
3866 now so it gets done only once. Strictly speaking, this is
3867 only necessary if the MEM is volatile, or if the address
3868 overlaps TARGET. But not performing the load twice also
3869 reduces the amount of rtl we generate and then have to CSE. */
3870 if (MEM_P (temp) && (want_value & 1) != 0)
3871 temp = copy_to_reg (temp);
3873 /* If TEMP is a VOIDmode constant, use convert_modes to make
3874 sure that we properly convert it. */
3875 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3877 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3878 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
3879 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3880 GET_MODE (target), temp,
3881 SUBREG_PROMOTED_UNSIGNED_P (target));
3884 convert_move (SUBREG_REG (target), temp,
3885 SUBREG_PROMOTED_UNSIGNED_P (target));
3887 /* If we promoted a constant, change the mode back down to match
3888 target. Otherwise, the caller might get confused by a result whose
3889 mode is larger than expected. */
3891 if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
3893 if (GET_MODE (temp) != VOIDmode)
3895 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
3896 SUBREG_PROMOTED_VAR_P (temp) = 1;
3897 SUBREG_PROMOTED_UNSIGNED_SET (temp,
3898 SUBREG_PROMOTED_UNSIGNED_P (target));
3901 temp = convert_modes (GET_MODE (target),
3902 GET_MODE (SUBREG_REG (target)),
3903 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
3906 return want_value & 1 ? temp : NULL_RTX;
3910 temp = expand_expr_real (exp, target, GET_MODE (target),
3912 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
3914 /* Return TARGET if it's a specified hardware register.
3915 If TARGET is a volatile mem ref, either return TARGET
3916 or return a reg copied *from* TARGET; ANSI requires this.
3918 Otherwise, if TEMP is not TARGET, return TEMP
3919 if it is constant (for efficiency),
3920 or if we really want the correct value. */
3921 if (!(target && REG_P (target)
3922 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3923 && !(MEM_P (target) && MEM_VOLATILE_P (target))
3924 && ! rtx_equal_p (temp, target)
3925 && (CONSTANT_P (temp) || (want_value & 1) != 0))
3926 dont_return_target = 1;
3929 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3930 the same as that of TARGET, adjust the constant. This is needed, for
3931 example, in case it is a CONST_DOUBLE and we want only a word-sized
3933 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3934 && TREE_CODE (exp) != ERROR_MARK
3935 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3936 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3937 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
3939 /* If value was not generated in the target, store it there.
3940 Convert the value to TARGET's type first if necessary and emit the
3941 pending incrementations that have been queued when expanding EXP.
3942 Note that we cannot emit the whole queue blindly because this will
3943 effectively disable the POST_INC optimization later.
3945 If TEMP and TARGET compare equal according to rtx_equal_p, but
3946 one or both of them are volatile memory refs, we have to distinguish
3948 - expand_expr has used TARGET. In this case, we must not generate
3949 another copy. This can be detected by TARGET being equal according
3951 - expand_expr has not used TARGET - that means that the source just
3952 happens to have the same RTX form. Since temp will have been created
3953 by expand_expr, it will compare unequal according to ==.
3954 We must generate a copy in this case, to reach the correct number
3955 of volatile memory references. */
3957 if ((! rtx_equal_p (temp, target)
3958 || (temp != target && (side_effects_p (temp)
3959 || side_effects_p (target))))
3960 && TREE_CODE (exp) != ERROR_MARK
3961 && ! dont_store_target
3962 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
3963 but TARGET is not a valid memory reference, TEMP will differ
3964 from TARGET although it is really the same location. */
3965 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
3966 /* If there's nothing to copy, don't bother. Don't call expr_size
3967 unless necessary, because some front ends' (C++) expr_size hook
3968 aborts on objects that are not supposed to be bit-copied or
3970 && expr_size (exp) != const0_rtx)
3972 if (GET_MODE (temp) != GET_MODE (target)
3973 && GET_MODE (temp) != VOIDmode)
3975 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
3976 if (dont_return_target)
3978 /* In this case, we will return TEMP,
3979 so make sure it has the proper mode.
3980 But don't forget to store the value into TARGET. */
3981 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3982 emit_move_insn (target, temp);
3985 convert_move (target, temp, unsignedp);
3988 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3990 /* Handle copying a string constant into an array. The string
3991 constant may be shorter than the array. So copy just the string's
3992 actual length, and clear the rest. First get the size of the data
3993 type of the string, which is actually the size of the target. */
3994 rtx size = expr_size (exp);
3996 if (GET_CODE (size) == CONST_INT
3997 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3998 emit_block_move (target, temp, size,
4000 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4003 /* Compute the size of the data to copy from the string. */
4005 = size_binop (MIN_EXPR,
4006 make_tree (sizetype, size),
4007 size_int (TREE_STRING_LENGTH (exp)));
4009 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4011 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4014 /* Copy that much. */
4015 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4016 TYPE_UNSIGNED (sizetype));
4017 emit_block_move (target, temp, copy_size_rtx,
4019 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4021 /* Figure out how much is left in TARGET that we have to clear.
4022 Do all calculations in ptr_mode. */
4023 if (GET_CODE (copy_size_rtx) == CONST_INT)
4025 size = plus_constant (size, -INTVAL (copy_size_rtx));
4026 target = adjust_address (target, BLKmode,
4027 INTVAL (copy_size_rtx));
4031 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4032 copy_size_rtx, NULL_RTX, 0,
4035 #ifdef POINTERS_EXTEND_UNSIGNED
4036 if (GET_MODE (copy_size_rtx) != Pmode)
4037 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4038 TYPE_UNSIGNED (sizetype));
4041 target = offset_address (target, copy_size_rtx,
4042 highest_pow2_factor (copy_size));
4043 label = gen_label_rtx ();
4044 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4045 GET_MODE (size), 0, label);
4048 if (size != const0_rtx)
4049 clear_storage (target, size);
4055 /* Handle calls that return values in multiple non-contiguous locations.
4056 The Irix 6 ABI has examples of this. */
4057 else if (GET_CODE (target) == PARALLEL)
4058 emit_group_load (target, temp, TREE_TYPE (exp),
4059 int_size_in_bytes (TREE_TYPE (exp)));
4060 else if (GET_MODE (temp) == BLKmode)
4061 emit_block_move (target, temp, expr_size (exp),
4063 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4066 temp = force_operand (temp, target);
4068 emit_move_insn (target, temp);
4072 /* If we don't want a value, return NULL_RTX. */
4073 if ((want_value & 1) == 0)
4076 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4077 ??? The latter test doesn't seem to make sense. */
4078 else if (dont_return_target && !MEM_P (temp))
4081 /* Return TARGET itself if it is a hard register. */
4082 else if ((want_value & 1) != 0
4083 && GET_MODE (target) != BLKmode
4084 && ! (REG_P (target)
4085 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4086 return copy_to_reg (target);
4088 else
4089 return target;
4090 }
4092 /* Examine CTOR.  Discover how many scalar fields are set to nonzero
4093 values and place the count in *P_NZ_ELTS.  Discover how many scalar
4094 fields are set to non-constant values and place the count in
4095 *P_NC_ELTS.  */
4097 categorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts,
4098 HOST_WIDE_INT *p_nc_elts)
4100 HOST_WIDE_INT nz_elts, nc_elts;
4101 tree list;
4103 nz_elts = 0;
4104 nc_elts = 0;
4106 for (list = CONSTRUCTOR_ELTS (ctor); list; list = TREE_CHAIN (list))
4108 tree value = TREE_VALUE (list);
4109 tree purpose = TREE_PURPOSE (list);
4110 HOST_WIDE_INT mult;
4112 mult = 1;
4113 if (TREE_CODE (purpose) == RANGE_EXPR)
4115 tree lo_index = TREE_OPERAND (purpose, 0);
4116 tree hi_index = TREE_OPERAND (purpose, 1);
4118 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4119 mult = (tree_low_cst (hi_index, 1)
4120 - tree_low_cst (lo_index, 1) + 1);
4123 switch (TREE_CODE (value))
4124 {
4125 case CONSTRUCTOR:
4126 {
4127 HOST_WIDE_INT nz = 0, nc = 0;
4128 categorize_ctor_elements_1 (value, &nz, &nc);
4129 nz_elts += mult * nz;
4130 nc_elts += mult * nc;
4131 }
4132 break;
4134 case INTEGER_CST:
4135 case REAL_CST:
4136 if (!initializer_zerop (value))
4137 nz_elts += mult;
4138 break;
4139 case COMPLEX_CST:
4140 if (!initializer_zerop (TREE_REALPART (value)))
4141 nz_elts += mult;
4142 if (!initializer_zerop (TREE_IMAGPART (value)))
4143 nz_elts += mult;
4144 break;
4146 case VECTOR_CST:
4147 tree v;
4148 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4149 if (!initializer_zerop (TREE_VALUE (v)))
4150 nz_elts += mult;
4151 break;
4153 default:
4154 nz_elts += mult;
4156 if (!initializer_constant_valid_p (value, TREE_TYPE (value)))
4157 nc_elts += mult;
4158 break;
4162 *p_nz_elts += nz_elts;
4163 *p_nc_elts += nc_elts;
4167 categorize_ctor_elements (tree ctor, HOST_WIDE_INT *p_nz_elts,
4168 HOST_WIDE_INT *p_nc_elts)
4170 *p_nz_elts = 0;
4171 *p_nc_elts = 0;
4172 categorize_ctor_elements_1 (ctor, p_nz_elts, p_nc_elts);
4175 /* Count the number of scalars in TYPE.  Return -1 on overflow or
4176 if TYPE is variable-sized.  */
4178 static HOST_WIDE_INT
4179 count_type_elements (tree type)
4181 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
4182 switch (TREE_CODE (type))
4186 tree telts = array_type_nelts (type);
4187 if (telts && host_integerp (telts, 1))
4189 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
4190 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type));
4191 if (n == 0)
4192 return 0;
4193 else if (max / n > m)
4194 return n * m;
4196 return -1;
4201 HOST_WIDE_INT n = 0, t;
4202 tree f;
4204 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4205 if (TREE_CODE (f) == FIELD_DECL)
4207 t = count_type_elements (TREE_TYPE (f));
4208 if (t < 0)
4209 return -1;
4210 n += t;
4212 return n;
4216 case UNION_TYPE:
4217 case QUAL_UNION_TYPE:
4219 /* Ho hum. How in the world do we guess here? Clearly it isn't
4220 right to count the fields. Guess based on the number of words. */
4221 HOST_WIDE_INT n = int_size_in_bytes (type);
4222 if (n < 0)
4223 return -1;
4224 return n / UNITS_PER_WORD;
4231 /* ??? This is broken.  We should encode the vector width in the tree.  */
4232 return GET_MODE_NUNITS (TYPE_MODE (type));
4241 case REFERENCE_TYPE:
4242 return 1;
4250 default:
4251 abort ();
4255 /* Return 1 if EXP contains mostly (3/4) zeros. */
4258 mostly_zeros_p (tree exp)
4260 if (TREE_CODE (exp) == CONSTRUCTOR)
4263 HOST_WIDE_INT nz_elts, nc_elts, elts;
4265 /* If there are no ranges of true bits, it is all zero. */
4266 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4267 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4269 categorize_ctor_elements (exp, &nz_elts, &nc_elts);
4270 elts = count_type_elements (TREE_TYPE (exp));
4272 return nz_elts < elts / 4;
4275 return initializer_zerop (exp);
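/* For illustration (an editorial sketch): the initializer

       int v[8] = { 0, 0, 5 };

   has one nonzero element out of eight, so nz_elts == 1 < 8 / 4 and
   mostly_zeros_p returns 1; store_constructor will then clear the
   whole array first and store only the single nonzero element.  */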
4278 /* Helper function for store_constructor.
4279 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4280 TYPE is the type of the CONSTRUCTOR, not the element type.
4281 CLEARED is as for store_constructor.
4282 ALIAS_SET is the alias set to use for any stores.
4284 This provides a recursive shortcut back to store_constructor when it isn't
4285 necessary to go through store_field. This is so that we can pass through
4286 the cleared field to let store_constructor know that we may not have to
4287 clear a substructure if the outer structure has already been cleared. */
4290 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4291 HOST_WIDE_INT bitpos, enum machine_mode mode,
4292 tree exp, tree type, int cleared, int alias_set)
4294 if (TREE_CODE (exp) == CONSTRUCTOR
4295 /* We can only call store_constructor recursively if the size and
4296 bit position are on a byte boundary. */
4297 && bitpos % BITS_PER_UNIT == 0
4298 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
4299 /* If we have a nonzero bitpos for a register target, then we just
4300 let store_field do the bitfield handling. This is unlikely to
4301 generate unnecessary clear instructions anyways. */
4302 && (bitpos == 0 || MEM_P (target)))
4304 if (MEM_P (target))
4305 target
4306 = adjust_address (target,
4307 GET_MODE (target) == BLKmode
4308 || 0 != (bitpos
4309 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4310 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4313 /* Update the alias set, if required. */
4314 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
4315 && MEM_ALIAS_SET (target) != 0)
4317 target = copy_rtx (target);
4318 set_mem_alias_set (target, alias_set);
4321 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4323 else
4324 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4325 alias_set);
4328 /* Store the value of constructor EXP into the rtx TARGET.
4329 TARGET is either a REG or a MEM; we know it cannot conflict, since
4330 safe_from_p has been called.
4331 CLEARED is true if TARGET is known to have been zero'd.
4332 SIZE is the number of bytes of TARGET we are allowed to modify: this
4333 may not be the same as the size of EXP if we are assigning to a field
4334 which has been packed to exclude padding bits. */
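/* For illustration (an editorial sketch, assuming a C front end): for

       struct { int x, y; } p = { 1, 2 };

   EXP is the CONSTRUCTOR { 1, 2 }, TARGET is the MEM (or REG) holding
   P, and each field is stored in turn through
   store_constructor_field.  */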
4337 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4339 tree type = TREE_TYPE (exp);
4340 #ifdef WORD_REGISTER_OPERATIONS
4341 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4342 #endif
4344 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4345 || TREE_CODE (type) == QUAL_UNION_TYPE)
4347 tree elt;
4349 /* If size is zero or the target is already cleared, do nothing. */
4350 if (size == 0 || cleared)
4351 cleared = 1;
4352 /* We either clear the aggregate or indicate the value is dead. */
4353 else if ((TREE_CODE (type) == UNION_TYPE
4354 || TREE_CODE (type) == QUAL_UNION_TYPE)
4355 && ! CONSTRUCTOR_ELTS (exp))
4356 /* If the constructor is empty, clear the union. */
4358 clear_storage (target, expr_size (exp));
4359 cleared = 1;
4362 /* If we are building a static constructor into a register,
4363 set the initial value as zero so we can fold the value into
4364 a constant. But if more than one register is involved,
4365 this probably loses. */
4366 else if (REG_P (target) && TREE_STATIC (exp)
4367 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4369 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4370 cleared = 1;
4373 /* If the constructor has fewer fields than the structure
4374 or if we are initializing the structure to mostly zeros,
4375 clear the whole structure first. Don't do this if TARGET is a
4376 register whose mode size isn't equal to SIZE since clear_storage
4377 can't handle this case. */
4378 else if (size > 0
4379 && ((list_length (CONSTRUCTOR_ELTS (exp)) != fields_length (type))
4380 || mostly_zeros_p (exp))
4381 && (!REG_P (target)
4382 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4383 == size)))
4385 rtx xtarget = target;
4387 if (readonly_fields_p (type))
4389 xtarget = copy_rtx (xtarget);
4390 RTX_UNCHANGING_P (xtarget) = 1;
4393 clear_storage (xtarget, GEN_INT (size));
4394 cleared = 1;
4397 if (! cleared)
4398 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4400 /* Store each element of the constructor into
4401 the corresponding field of TARGET. */
4403 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4405 tree field = TREE_PURPOSE (elt);
4406 tree value = TREE_VALUE (elt);
4407 enum machine_mode mode;
4408 HOST_WIDE_INT bitsize;
4409 HOST_WIDE_INT bitpos = 0;
4410 tree offset;
4411 rtx to_rtx = target;
4413 /* Just ignore missing fields.
4414 We cleared the whole structure, above,
4415 if any fields are missing.  */
4416 if (field == 0)
4417 continue;
4419 if (cleared && initializer_zerop (value))
4420 continue;
4422 if (host_integerp (DECL_SIZE (field), 1))
4423 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4424 else
4425 bitsize = -1;
4427 mode = DECL_MODE (field);
4428 if (DECL_BIT_FIELD (field))
4429 mode = VOIDmode;
4431 offset = DECL_FIELD_OFFSET (field);
4432 if (host_integerp (offset, 0)
4433 && host_integerp (bit_position (field), 0))
4435 bitpos = int_bit_position (field);
4436 offset = 0;
4438 else
4439 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4441 if (offset)
4443 rtx offset_rtx;
4445 offset
4446 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
4447 make_tree (TREE_TYPE (exp),
4448 target));
4450 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4451 if (!MEM_P (to_rtx))
4452 abort ();
4454 #ifdef POINTERS_EXTEND_UNSIGNED
4455 if (GET_MODE (offset_rtx) != Pmode)
4456 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4458 if (GET_MODE (offset_rtx) != ptr_mode)
4459 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4462 to_rtx = offset_address (to_rtx, offset_rtx,
4463 highest_pow2_factor (offset));
4466 if (TREE_READONLY (field))
4468 if (MEM_P (to_rtx))
4469 to_rtx = copy_rtx (to_rtx);
4471 RTX_UNCHANGING_P (to_rtx) = 1;
4474 #ifdef WORD_REGISTER_OPERATIONS
4475 /* If this initializes a field that is smaller than a word, at the
4476 start of a word, try to widen it to a full word.
4477 This special case allows us to output C++ member function
4478 initializations in a form that the optimizers can understand. */
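/* For illustration (an editorial sketch): on a 32-bit big-endian
   target, storing the 8-bit constant 0x12 into the first byte of a
   word-sized register is widened below into a full-word store of
   0x12 << 24, i.e. 0x12000000.  */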
4479 if (REG_P (target)
4480 && bitsize < BITS_PER_WORD
4481 && bitpos % BITS_PER_WORD == 0
4482 && GET_MODE_CLASS (mode) == MODE_INT
4483 && TREE_CODE (value) == INTEGER_CST
4484 && exp_size >= 0
4485 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4487 tree type = TREE_TYPE (value);
4489 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4491 type = lang_hooks.types.type_for_size
4492 (BITS_PER_WORD, TYPE_UNSIGNED (type));
4493 value = convert (type, value);
4496 if (BYTES_BIG_ENDIAN)
4497 value
4498 = fold (build (LSHIFT_EXPR, type, value,
4499 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4500 bitsize = BITS_PER_WORD;
4505 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4506 && DECL_NONADDRESSABLE_P (field))
4508 to_rtx = copy_rtx (to_rtx);
4509 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4512 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4513 value, type, cleared,
4514 get_alias_set (TREE_TYPE (field)));
4517 else if (TREE_CODE (type) == ARRAY_TYPE
4518 || TREE_CODE (type) == VECTOR_TYPE)
4524 tree elttype = TREE_TYPE (type);
4526 HOST_WIDE_INT minelt = 0;
4527 HOST_WIDE_INT maxelt = 0;
4531 unsigned n_elts = 0;
4533 if (TREE_CODE (type) == ARRAY_TYPE)
4534 domain = TYPE_DOMAIN (type);
4536 /* Vectors do not have domains; look up the domain of
4537 the array embedded in the debug representation type.
4538 FIXME Would probably be more efficient to treat vectors
4539 separately from arrays. */
4541 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
4542 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
4543 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
4545 enum machine_mode mode = GET_MODE (target);
4547 icode = (int) vec_init_optab->handlers[mode].insn_code;
4548 if (icode != CODE_FOR_nothing)
4552 elt_size = GET_MODE_SIZE (GET_MODE_INNER (mode));
4553 n_elts = (GET_MODE_SIZE (mode) / elt_size);
4554 vector = alloca (n_elts * sizeof (rtx));
4555 for (i = 0; i < n_elts; i++)
4556 vector [i] = CONST0_RTX (GET_MODE_INNER (mode));
4561 const_bounds_p = (TYPE_MIN_VALUE (domain)
4562 && TYPE_MAX_VALUE (domain)
4563 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4564 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4566 /* If we have constant bounds for the range of the type, get them. */
4567 if (const_bounds_p)
4568 {
4569 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4570 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4571 }
4573 /* If the constructor has fewer elements than the array,
4574 clear the whole array first.  Similarly if this is a
4575 static constructor of a non-BLKmode object.  */
4576 if (cleared || (REG_P (target) && TREE_STATIC (exp)))
4577 need_to_clear = 1;
4578 else
4579 {
4580 HOST_WIDE_INT count = 0, zero_count = 0;
4581 need_to_clear = ! const_bounds_p;
4583 /* This loop is a more accurate version of the loop in
4584 mostly_zeros_p (it handles RANGE_EXPR in an index).
4585 It is also needed to check for missing elements. */
4586 for (elt = CONSTRUCTOR_ELTS (exp);
4587 elt != NULL_TREE && ! need_to_clear;
4588 elt = TREE_CHAIN (elt))
4590 tree index = TREE_PURPOSE (elt);
4591 HOST_WIDE_INT this_node_count;
4593 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4595 tree lo_index = TREE_OPERAND (index, 0);
4596 tree hi_index = TREE_OPERAND (index, 1);
4598 if (! host_integerp (lo_index, 1)
4599 || ! host_integerp (hi_index, 1))
4600 {
4601 need_to_clear = 1;
4602 break;
4603 }
4605 this_node_count = (tree_low_cst (hi_index, 1)
4606 - tree_low_cst (lo_index, 1) + 1);
4609 this_node_count = 1;
4611 count += this_node_count;
4612 if (mostly_zeros_p (TREE_VALUE (elt)))
4613 zero_count += this_node_count;
4616 /* Clear the entire array first if there are any missing elements,
4617 or if the incidence of zero elements is >= 75%. */
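/* For illustration: the test below is an integer form of
   zero_count / count >= 3/4; e.g. with count == 16 it requires
   zero_count >= 12 before the whole array is pre-cleared.  */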
4618 if (! need_to_clear
4619 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4620 need_to_clear = 1;
4623 if (need_to_clear && size > 0 && !vector)
4627 if (REG_P (target))
4628 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4629 else
4630 clear_storage (target, GEN_INT (size));
4631 cleared = 1;
4634 else if (REG_P (target))
4635 /* Inform later passes that the old value is dead. */
4636 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4638 /* Store each element of the constructor into
4639 the corresponding element of TARGET, determined
4640 by counting the elements. */
4641 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4642 elt;
4643 elt = TREE_CHAIN (elt), i++)
4645 enum machine_mode mode;
4646 HOST_WIDE_INT bitsize;
4647 HOST_WIDE_INT bitpos;
4649 tree value = TREE_VALUE (elt);
4650 tree index = TREE_PURPOSE (elt);
4651 rtx xtarget = target;
4653 if (cleared && initializer_zerop (value))
4654 continue;
4656 unsignedp = TYPE_UNSIGNED (elttype);
4657 mode = TYPE_MODE (elttype);
4658 if (mode == BLKmode)
4659 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4660 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4661 : -1);
4662 else
4663 bitsize = GET_MODE_BITSIZE (mode);
4665 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4667 tree lo_index = TREE_OPERAND (index, 0);
4668 tree hi_index = TREE_OPERAND (index, 1);
4669 rtx index_r, pos_rtx;
4670 HOST_WIDE_INT lo, hi, count;
4671 tree position;
4676 /* If the range is constant and "small", unroll the loop. */
4677 if (const_bounds_p
4678 && host_integerp (lo_index, 0)
4679 && host_integerp (hi_index, 0)
4680 && (lo = tree_low_cst (lo_index, 0),
4681 hi = tree_low_cst (hi_index, 0),
4682 count = hi - lo + 1,
4683 (!MEM_P (target)
4684 || count <= 2
4685 || (host_integerp (TYPE_SIZE (elttype), 1)
4686 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4687 <= 40 * 8)))))
4689 lo -= minelt; hi -= minelt;
4690 for (; lo <= hi; lo++)
4692 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4694 if (MEM_P (target)
4695 && !MEM_KEEP_ALIAS_SET_P (target)
4696 && TREE_CODE (type) == ARRAY_TYPE
4697 && TYPE_NONALIASED_COMPONENT (type))
4699 target = copy_rtx (target);
4700 MEM_KEEP_ALIAS_SET_P (target) = 1;
4703 store_constructor_field
4704 (target, bitsize, bitpos, mode, value, type, cleared,
4705 get_alias_set (elttype));
4710 rtx loop_start = gen_label_rtx ();
4711 rtx loop_end = gen_label_rtx ();
4712 tree exit_cond;
4714 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4715 unsignedp = TYPE_UNSIGNED (domain);
4717 index = build_decl (VAR_DECL, NULL_TREE, domain);
4719 index_r
4720 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4721 &unsignedp, 0));
4722 SET_DECL_RTL (index, index_r);
4723 store_expr (lo_index, index_r, 0);
4725 /* Build the head of the loop. */
4726 do_pending_stack_adjust ();
4727 emit_label (loop_start);
4729 /* Assign value to element index. */
4730 position
4731 = convert (ssizetype,
4732 fold (build (MINUS_EXPR, TREE_TYPE (index),
4733 index, TYPE_MIN_VALUE (domain))));
4734 position = size_binop (MULT_EXPR, position,
4735 convert (ssizetype,
4736 TYPE_SIZE_UNIT (elttype)));
4738 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4739 xtarget = offset_address (target, pos_rtx,
4740 highest_pow2_factor (position));
4741 xtarget = adjust_address (xtarget, mode, 0);
4742 if (TREE_CODE (value) == CONSTRUCTOR)
4743 store_constructor (value, xtarget, cleared,
4744 bitsize / BITS_PER_UNIT);
4745 else
4746 store_expr (value, xtarget, 0);
4748 /* Generate a conditional jump to exit the loop. */
4749 exit_cond = build (LT_EXPR, integer_type_node,
4750 index, hi_index);
4751 jumpif (exit_cond, loop_end);
4753 /* Update the loop counter, and jump to the head of
4754 the loop.  */
4755 expand_assignment (index,
4756 build2 (PLUS_EXPR, TREE_TYPE (index),
4757 index, integer_one_node), 0);
4759 emit_jump (loop_start);
4761 /* Build the end of the loop. */
4762 emit_label (loop_end);
4765 else if ((index != 0 && ! host_integerp (index, 0))
4766 || ! host_integerp (TYPE_SIZE (elttype), 1))
4773 if (index == 0)
4774 index = ssize_int (1);
4775 else
4777 index = convert (ssizetype,
4778 fold (build (MINUS_EXPR, index,
4779 TYPE_MIN_VALUE (domain))));
4781 position = size_binop (MULT_EXPR, index,
4782 convert (ssizetype,
4783 TYPE_SIZE_UNIT (elttype)));
4784 xtarget = offset_address (target,
4785 expand_expr (position, 0, VOIDmode, 0),
4786 highest_pow2_factor (position));
4787 xtarget = adjust_address (xtarget, mode, 0);
4788 store_expr (value, xtarget, 0);
4791 else if (vector)
4793 int pos;
4794 if (index != 0)
4795 pos = tree_low_cst (index, 0) - minelt;
4796 else
4797 pos = i;
4798 vector[pos] = expand_expr (value, NULL_RTX, VOIDmode, 0);
4800 else
4802 if (index != 0)
4803 bitpos = ((tree_low_cst (index, 0) - minelt)
4804 * tree_low_cst (TYPE_SIZE (elttype), 1));
4805 else
4806 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4808 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
4809 && TREE_CODE (type) == ARRAY_TYPE
4810 && TYPE_NONALIASED_COMPONENT (type))
4812 target = copy_rtx (target);
4813 MEM_KEEP_ALIAS_SET_P (target) = 1;
4815 store_constructor_field (target, bitsize, bitpos, mode, value,
4816 type, cleared, get_alias_set (elttype));
4820 if (vector)
4821 emit_insn (GEN_FCN (icode) (target,
4822 gen_rtx_PARALLEL (GET_MODE (target),
4823 gen_rtvec_v (n_elts, vector))));
4827 /* Set constructor assignments. */
4828 else if (TREE_CODE (type) == SET_TYPE)
4830 tree elt = CONSTRUCTOR_ELTS (exp);
4831 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4832 tree domain = TYPE_DOMAIN (type);
4833 tree domain_min, domain_max, bitlength;
4835 /* The default implementation strategy is to extract the constant
4836 parts of the constructor, use that to initialize the target,
4837 and then "or" in whatever non-constant ranges we need in addition.
4839 If a large set is all zero or all ones, it is
4840 probably better to set it using memset.
4841 Also, if a large set has just a single range, it may also be
4842 better to first clear the whole set (using memset) and then set
4843 the bits we want.  */
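/* For illustration (an editorial sketch, assuming a Pascal-like front
   end): for a set with domain 0..31 (4 bytes) initialized to the
   single range [8..15], STARTBIT is 8 and ENDBIT is 15, so ENDB
   rounds to 16; both bounds fall on byte boundaries, and the
   byte-aligned-range optimization further below can memset the single
   byte at offset 1 to -1 instead of calling setbits.  */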
4845 /* Check for all zeros. */
4846 if (elt == NULL_TREE && size > 0)
4848 if (!cleared)
4849 clear_storage (target, GEN_INT (size));
4850 return;
4853 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4854 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4855 bitlength = size_binop (PLUS_EXPR,
4856 size_diffop (domain_max, domain_min),
4857 ssize_int (1));
4859 nbits = tree_low_cst (bitlength, 1);
4861 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4862 are "complicated" (more than one range), initialize (the
4863 constant parts) by copying from a constant. */
4864 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4865 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4867 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4868 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4869 char *bit_buffer = alloca (nbits);
4870 HOST_WIDE_INT word = 0;
4871 unsigned int bit_pos = 0;
4872 unsigned int ibit = 0;
4873 unsigned int offset = 0; /* In bytes from beginning of set. */
4875 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4876 for (;;)
4878 if (bit_buffer[ibit])
4880 if (BYTES_BIG_ENDIAN)
4881 word |= (1 << (set_word_size - 1 - bit_pos));
4882 else
4883 word |= 1 << bit_pos;
4885 bit_pos++; ibit++;
4887 if (bit_pos >= set_word_size || ibit == nbits)
4889 if (word != 0 || ! cleared)
4891 rtx datum = gen_int_mode (word, mode);
4892 rtx to_rtx;
4894 /* The assumption here is that it is safe to use
4895 XEXP if the set is multi-word, but not if
4896 it's single-word. */
4897 if (MEM_P (target))
4898 to_rtx = adjust_address (target, mode, offset);
4899 else if (offset == 0)
4900 to_rtx = target;
4901 else
4902 abort ();
4903 emit_move_insn (to_rtx, datum);
4906 if (ibit == nbits)
4907 break;
4908 word = 0;
4909 bit_pos = 0;
4910 offset += set_word_size / BITS_PER_UNIT;
4915 /* Don't bother clearing storage if the set is all ones. */
4916 if (TREE_CHAIN (elt) != NULL_TREE
4917 || (TREE_PURPOSE (elt) == NULL_TREE
4918 ? nbits != 1
4919 : ( ! host_integerp (TREE_VALUE (elt), 0)
4920 || ! host_integerp (TREE_PURPOSE (elt), 0)
4921 || (tree_low_cst (TREE_VALUE (elt), 0)
4922 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
4923 != (HOST_WIDE_INT) nbits))))
4924 clear_storage (target, expr_size (exp));
4926 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4928 /* Start of range of element or NULL. */
4929 tree startbit = TREE_PURPOSE (elt);
4930 /* End of range of element, or element value. */
4931 tree endbit = TREE_VALUE (elt);
4932 HOST_WIDE_INT startb, endb;
4933 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4935 bitlength_rtx = expand_expr (bitlength,
4936 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4938 /* Handle non-range tuple element like [ expr ]. */
4939 if (startbit == NULL_TREE)
4941 startbit = save_expr (endbit);
4942 endbit = startbit;
4945 startbit = convert (sizetype, startbit);
4946 endbit = convert (sizetype, endbit);
4947 if (! integer_zerop (domain_min))
4949 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4950 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4952 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4953 EXPAND_CONST_ADDRESS);
4954 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4955 EXPAND_CONST_ADDRESS);
4958 if (REG_P (target))
4959 {
4960 targetx = assign_temp
4961 ((build_qualified_type (lang_hooks.types.type_for_mode
4962 (GET_MODE (target), 0),
4963 TYPE_QUAL_CONST)),
4964 0, 1, 1);
4965 emit_move_insn (targetx, target);
4966 }
4968 else if (MEM_P (target))
4969 targetx = target;
4970 else
4971 abort ();
4973 /* Optimization: If startbit and endbit are constants divisible
4974 by BITS_PER_UNIT, call memset instead. */
4975 if (TREE_CODE (startbit) == INTEGER_CST
4976 && TREE_CODE (endbit) == INTEGER_CST
4977 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4978 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4980 emit_library_call (memset_libfunc, LCT_NORMAL,
4981 VOIDmode, 3,
4982 plus_constant (XEXP (targetx, 0),
4983 startb / BITS_PER_UNIT),
4984 Pmode,
4985 constm1_rtx, TYPE_MODE (integer_type_node),
4986 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4987 TYPE_MODE (sizetype));
4990 emit_library_call (setbits_libfunc, LCT_NORMAL,
4991 VOIDmode, 4, XEXP (targetx, 0),
4992 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
4993 startbit_rtx, TYPE_MODE (sizetype),
4994 endbit_rtx, TYPE_MODE (sizetype));
4996 if (REG_P (target))
4997 emit_move_insn (target, targetx);
5005 /* Store the value of EXP (an expression tree)
5006 into a subfield of TARGET which has mode MODE and occupies
5007 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5008 If MODE is VOIDmode, it means that we are storing into a bit-field.
5010 If VALUE_MODE is VOIDmode, return nothing in particular.
5011 UNSIGNEDP is not used in this case.
5013 Otherwise, return an rtx for the value stored. This rtx
5014 has mode VALUE_MODE if that is convenient to do.
5015 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5017 TYPE is the type of the underlying object,
5019 ALIAS_SET is the alias set for the destination. This value will
5020 (in general) be different from that for TARGET, since TARGET is a
5021 reference to the containing structure. */
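/* For illustration (an editorial sketch, assuming a C front end): for

       struct { unsigned int f : 3; } s;
       s.f = 5;

   store_field is entered with BITSIZE == 3, BITPOS == 0 and
   MODE == VOIDmode, so the bit-field path below ends in
   store_bit_field.  */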
5024 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5025 enum machine_mode mode, tree exp, enum machine_mode value_mode,
5026 int unsignedp, tree type, int alias_set)
5028 HOST_WIDE_INT width_mask = 0;
5030 if (TREE_CODE (exp) == ERROR_MARK)
5031 return const0_rtx;
5033 /* If we have nothing to store, do nothing unless the expression has
5034 side-effects.  */
5035 if (bitsize == 0)
5036 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5037 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5038 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5040 /* If we are storing into an unaligned field of an aligned union that is
5041 in a register, we may have the mode of TARGET being an integer mode but
5042 MODE == BLKmode. In that case, get an aligned object whose size and
5043 alignment are the same as TARGET and store TARGET into it (we can avoid
5044 the store if the field being stored is the entire width of TARGET). Then
5045 call ourselves recursively to store the field into a BLKmode version of
5046 that object. Finally, load from the object into TARGET. This is not
5047 very efficient in general, but should only be slightly more expensive
5048 than the otherwise-required unaligned accesses. Perhaps this can be
5049 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5050 twice, once with emit_move_insn and once via store_field. */
5052 if (mode == BLKmode
5053 && (REG_P (target) || GET_CODE (target) == SUBREG))
5055 rtx object = assign_temp (type, 0, 1, 1);
5056 rtx blk_object = adjust_address (object, BLKmode, 0);
5058 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5059 emit_move_insn (object, target);
5061 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5062 alias_set);
5064 emit_move_insn (target, object);
5066 /* We want to return the BLKmode version of the data.  */
5067 return blk_object;
5068 }
5070 if (GET_CODE (target) == CONCAT)
5072 /* We're storing into a struct containing a single __complex.  */
5074 if (bitpos != 0)
5075 abort ();
5076 return store_expr (exp, target, value_mode != VOIDmode);
5079 /* If the structure is in a register or if the component
5080 is a bit field, we cannot use addressing to access it.
5081 Use bit-field techniques or SUBREG to store in it. */
5083 if (mode == VOIDmode
5084 || (mode != BLKmode && ! direct_store[(int) mode]
5085 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5086 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5088 || GET_CODE (target) == SUBREG
5089 /* If the field isn't aligned enough to store as an ordinary memref,
5090 store it as a bit field. */
5092 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5093 || bitpos % GET_MODE_ALIGNMENT (mode))
5094 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5095 || (bitpos % BITS_PER_UNIT != 0)))
5096 /* If the RHS and field are a constant size and the size of the
5097 RHS isn't the same size as the bitfield, we must use bitfield
5098 operations.  */
5099 || (bitsize >= 0
5100 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5101 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5103 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5105 /* If BITSIZE is narrower than the size of the type of EXP
5106 we will be narrowing TEMP. Normally, what's wanted are the
5107 low-order bits. However, if EXP's type is a record and this is
5108 a big-endian machine, we want the upper BITSIZE bits.  */
5109 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5110 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5111 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5112 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5113 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5114 - bitsize),
5115 NULL_RTX, 1);
5117 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5118 MODE.  */
5119 if (mode != VOIDmode && mode != BLKmode
5120 && mode != TYPE_MODE (TREE_TYPE (exp)))
5121 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5123 /* If the modes of TARGET and TEMP are both BLKmode, both
5124 must be in memory and BITPOS must be aligned on a byte
5125 boundary. If so, we simply do a block copy. */
5126 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5128 if (!MEM_P (target) || !MEM_P (temp)
5129 || bitpos % BITS_PER_UNIT != 0)
5130 abort ();
5132 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5133 emit_block_move (target, temp,
5134 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5135 / BITS_PER_UNIT),
5136 BLOCK_OP_NORMAL);
5138 return value_mode == VOIDmode ? const0_rtx : target;
5141 /* Store the value in the bitfield. */
5142 store_bit_field (target, bitsize, bitpos, mode, temp);
5144 if (value_mode != VOIDmode)
5146 /* The caller wants an rtx for the value.
5147 If possible, avoid refetching from the bitfield itself. */
5148 if (width_mask != 0
5149 && ! (MEM_P (target) && MEM_VOLATILE_P (target)))
5151 tree count;
5152 enum machine_mode tmode;
5154 tmode = GET_MODE (temp);
5155 if (tmode == VOIDmode)
5156 tmode = value_mode;
5158 if (unsignedp)
5159 return expand_and (tmode, temp,
5160 gen_int_mode (width_mask, tmode),
5161 NULL_RTX);
5163 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5164 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5165 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5168 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5169 NULL_RTX, value_mode, VOIDmode);
5175 rtx addr = XEXP (target, 0);
5176 rtx to_rtx = target;
5178 /* If a value is wanted, it must be the lhs;
5179 so make the address stable for multiple use. */
5181 if (value_mode != VOIDmode && !REG_P (addr)
5182 && ! CONSTANT_ADDRESS_P (addr)
5183 /* A frame-pointer reference is already stable. */
5184 && ! (GET_CODE (addr) == PLUS
5185 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5186 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5187 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5188 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5190 /* Now build a reference to just the desired component. */
5192 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5194 if (to_rtx == target)
5195 to_rtx = copy_rtx (to_rtx);
5197 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5198 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5199 set_mem_alias_set (to_rtx, alias_set);
5201 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5205 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5206 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5207 codes and find the ultimate containing object, which we return.
5209 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5210 bit position, and *PUNSIGNEDP to the signedness of the field.
5211 If the position of the field is variable, we store a tree
5212 giving the variable offset (in units) in *POFFSET.
5213 This offset is in addition to the bit position.
5214 If the position is not variable, we store 0 in *POFFSET.
5216 If any of the extraction expressions is volatile,
5217 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5219 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5220 is a mode that can be used to access the field.  In that case, *PBITSIZE
5221 is redundant.
5223 If the field describes a variable-sized object, *PMODE is set to
5224 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5225 this case, but the address of the object can be found. */
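/* For illustration (an editorial sketch, assuming 32-bit int and
   16-bit short): for

       struct S { int x; short y; } s;

   applied to the COMPONENT_REF s.y this returns the VAR_DECL for S
   with *PBITSIZE == 16, *PBITPOS == 32, *POFFSET == 0 and *PMODE the
   16-bit integer mode.  */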
5228 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5229 HOST_WIDE_INT *pbitpos, tree *poffset,
5230 enum machine_mode *pmode, int *punsignedp,
5234 enum machine_mode mode = VOIDmode;
5235 tree offset = size_zero_node;
5236 tree bit_offset = bitsize_zero_node;
5239 /* First get the mode, signedness, and size. We do this from just the
5240 outermost expression. */
5241 if (TREE_CODE (exp) == COMPONENT_REF)
5243 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5244 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5245 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5247 *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
5249 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5251 size_tree = TREE_OPERAND (exp, 1);
5252 *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
5256 mode = TYPE_MODE (TREE_TYPE (exp));
5257 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5259 if (mode == BLKmode)
5260 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5261 else
5262 *pbitsize = GET_MODE_BITSIZE (mode);
5267 if (! host_integerp (size_tree, 1))
5268 mode = BLKmode, *pbitsize = -1;
5269 else
5270 *pbitsize = tree_low_cst (size_tree, 1);
5273 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5274 and find the ultimate containing object. */
5277 if (TREE_CODE (exp) == BIT_FIELD_REF)
5278 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5279 else if (TREE_CODE (exp) == COMPONENT_REF)
5281 tree field = TREE_OPERAND (exp, 1);
5282 tree this_offset = component_ref_field_offset (exp);
5284 /* If this field hasn't been filled in yet, don't go
5285 past it. This should only happen when folding expressions
5286 made during type construction. */
5287 if (this_offset == 0)
5288 break;
5290 offset = size_binop (PLUS_EXPR, offset, this_offset);
5291 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5292 DECL_FIELD_BIT_OFFSET (field));
5294 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5297 else if (TREE_CODE (exp) == ARRAY_REF
5298 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5300 tree index = TREE_OPERAND (exp, 1);
5301 tree low_bound = array_ref_low_bound (exp);
5302 tree unit_size = array_ref_element_size (exp);
5304 /* We assume all arrays have sizes that are a multiple of a byte.
5305 First subtract the lower bound, if any, in the type of the
5306 index, then convert to sizetype and multiply by the size of the
5307 element.  */
5308 if (! integer_zerop (low_bound))
5309 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5310 index, low_bound));
5312 offset = size_binop (PLUS_EXPR, offset,
5313 size_binop (MULT_EXPR,
5314 convert (sizetype, index),
5315 unit_size));
5318 /* We can go inside most conversions: all NON_LVALUE_EXPRs, all normal
5319 conversions that don't change the mode, and all view conversions
5320 except those that need to "step up" the alignment. */
5321 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5322 && ! (TREE_CODE (exp) == VIEW_CONVERT_EXPR
5323 && ! ((TYPE_ALIGN (TREE_TYPE (exp))
5324 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5326 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5327 < BIGGEST_ALIGNMENT)
5328 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5329 || TYPE_ALIGN_OK (TREE_TYPE
5330 (TREE_OPERAND (exp, 0))))))
5331 && ! ((TREE_CODE (exp) == NOP_EXPR
5332 || TREE_CODE (exp) == CONVERT_EXPR)
5333 && (TYPE_MODE (TREE_TYPE (exp))
5334 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5335 break;
5337 /* If any reference in the chain is volatile, the effect is volatile. */
5338 if (TREE_THIS_VOLATILE (exp))
5339 *pvolatilep = 1;
5341 exp = TREE_OPERAND (exp, 0);
5344 /* If OFFSET is constant, see if we can return the whole thing as a
5345 constant bit position. Otherwise, split it up. */
5346 if (host_integerp (offset, 0)
5347 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5349 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5350 && host_integerp (tem, 0))
5351 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5353 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5359 /* Return a tree of sizetype representing the size, in bytes, of the element
5360 of EXP, an ARRAY_REF. */
5363 array_ref_element_size (tree exp)
5365 tree aligned_size = TREE_OPERAND (exp, 3);
5366 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5368 /* If a size was specified in the ARRAY_REF, it's the size measured
5369 in alignment units of the element type. So multiply by that value. */
5370 if (aligned_size)
5371 return size_binop (MULT_EXPR, aligned_size,
5372 size_int (TYPE_ALIGN (elmt_type) / BITS_PER_UNIT));
5374 /* Otherwise, take the size from that of the element type. Substitute
5375 any PLACEHOLDER_EXPR that we have. */
5377 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
5380 /* Return a tree representing the lower bound of the array mentioned in
5381 EXP, an ARRAY_REF. */
5384 array_ref_low_bound (tree exp)
5386 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5388 /* If a lower bound is specified in EXP, use it. */
5389 if (TREE_OPERAND (exp, 2))
5390 return TREE_OPERAND (exp, 2);
5392 /* Otherwise, if there is a domain type and it has a lower bound, use it,
5393 substituting for a PLACEHOLDER_EXPR as needed. */
5394 if (domain_type && TYPE_MIN_VALUE (domain_type))
5395 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
5397 /* Otherwise, return a zero of the appropriate type. */
5398 return fold_convert (TREE_TYPE (TREE_OPERAND (exp, 1)), integer_zero_node);
5401 /* Return a tree representing the upper bound of the array mentioned in
5402 EXP, an ARRAY_REF. */
5405 array_ref_up_bound (tree exp)
5407 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5409 /* If there is a domain type and it has an upper bound, use it, substituting
5410 for a PLACEHOLDER_EXPR as needed. */
5411 if (domain_type && TYPE_MAX_VALUE (domain_type))
5412 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
5414 /* Otherwise fail.  */
5415 return NULL_TREE;
5418 /* Return a tree representing the offset, in bytes, of the field referenced
5419 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
5422 component_ref_field_offset (tree exp)
5424 tree aligned_offset = TREE_OPERAND (exp, 2);
5425 tree field = TREE_OPERAND (exp, 1);
5427 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
5428 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT.  So multiply by that
5429 value.  */
5430 if (aligned_offset)
5431 return size_binop (MULT_EXPR, aligned_offset,
5432 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
5434 /* Otherwise, take the offset from that of the field. Substitute
5435 any PLACEHOLDER_EXPR that we have. */
5437 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
5440 /* Return 1 if T is an expression that get_inner_reference handles. */
5443 handled_component_p (tree t)
5445 switch (TREE_CODE (t))
5450 case ARRAY_RANGE_REF:
5451 case NON_LVALUE_EXPR:
5452 case VIEW_CONVERT_EXPR:
5455 /* ??? Sure they are handled, but get_inner_reference may return
5456 a different PBITSIZE, depending upon whether the expression is
5457 wrapped up in a NOP_EXPR or not, e.g. for bitfields. */
5460 return (TYPE_MODE (TREE_TYPE (t))
5461 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5468 /* Given an rtx VALUE that may contain additions and multiplications, return
5469 an equivalent value that just refers to a register, memory, or constant.
5470 This is done by generating instructions to perform the arithmetic and
5471 returning a pseudo-register containing the value.
5473 The returned value may be a REG, SUBREG, MEM or constant. */
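/* For illustration (an editorial sketch): given
   (plus (reg 60) (const_int 4)), the code below emits an add insn and
   returns a pseudo register holding the sum; a plain REG, MEM or
   constant is returned unchanged.  */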
5476 force_operand (rtx value, rtx target)
5479 /* Use subtarget as the target for operand 0 of a binary operation. */
5480 rtx subtarget = get_subtarget (target);
5481 enum rtx_code code = GET_CODE (value);
5483 /* Check for subreg applied to an expression produced by loop optimizer. */
5484 if (code == SUBREG
5485 && !REG_P (SUBREG_REG (value))
5486 && !MEM_P (SUBREG_REG (value)))
5488 value = simplify_gen_subreg (GET_MODE (value),
5489 force_reg (GET_MODE (SUBREG_REG (value)),
5490 force_operand (SUBREG_REG (value),
5492 GET_MODE (SUBREG_REG (value)),
5493 SUBREG_BYTE (value));
5494 code = GET_CODE (value);
5497 /* Check for a PIC address load. */
5498 if ((code == PLUS || code == MINUS)
5499 && XEXP (value, 0) == pic_offset_table_rtx
5500 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5501 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5502 || GET_CODE (XEXP (value, 1)) == CONST))
5504 if (!subtarget)
5505 subtarget = gen_reg_rtx (GET_MODE (value));
5506 emit_move_insn (subtarget, value);
5507 return subtarget;
5510 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5512 if (!target)
5513 target = gen_reg_rtx (GET_MODE (value));
5514 convert_move (target, force_operand (XEXP (value, 0), NULL),
5515 code == ZERO_EXTEND);
5516 return target;
5519 if (ARITHMETIC_P (value))
5521 op2 = XEXP (value, 1);
5522 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
5523 subtarget = 0;
5524 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5526 code = PLUS;
5527 op2 = negate_rtx (GET_MODE (value), op2);
5530 /* Check for an addition with OP2 a constant integer and our first
5531 operand a PLUS of a virtual register and something else. In that
5532 case, we want to emit the sum of the virtual register and the
5533 constant first and then add the other value. This allows virtual
5534 register instantiation to simply modify the constant rather than
5535 creating another one around this addition. */
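/* For illustration (an editorial sketch): given
   (plus (plus (reg virtual-stack-vars) (reg 61)) (const_int 8))
   we first emit TEMP = virtual-stack-vars + 8, which register
   instantiation can later fold into a single frame offset, and only
   then add reg 61.  */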
5536 if (code == PLUS && GET_CODE (op2) == CONST_INT
5537 && GET_CODE (XEXP (value, 0)) == PLUS
5538 && REG_P (XEXP (XEXP (value, 0), 0))
5539 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5540 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5542 rtx temp = expand_simple_binop (GET_MODE (value), code,
5543 XEXP (XEXP (value, 0), 0), op2,
5544 subtarget, 0, OPTAB_LIB_WIDEN);
5545 return expand_simple_binop (GET_MODE (value), code, temp,
5546 force_operand (XEXP (XEXP (value,
5548 target, 0, OPTAB_LIB_WIDEN);
5551 op1 = force_operand (XEXP (value, 0), subtarget);
5552 op2 = force_operand (op2, NULL_RTX);
5553 switch (code)
5554 {
5555 case MULT:
5556 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5557 case DIV:
5558 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5559 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5560 target, 1, OPTAB_LIB_WIDEN);
5561 else
5562 return expand_divmod (0,
5563 FLOAT_MODE_P (GET_MODE (value))
5564 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5565 GET_MODE (value), op1, op2, target, 0);
5567 case MOD:
5568 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5569 target, 0);
5571 case UDIV:
5572 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5573 target, 1);
5575 case UMOD:
5576 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5577 target, 1);
5579 case ASHIFTRT:
5580 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5581 target, 0, OPTAB_LIB_WIDEN);
5583 default:
5584 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5585 target, 1, OPTAB_LIB_WIDEN);
5586 }
5588 if (UNARY_P (value))
5590 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5591 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5594 #ifdef INSN_SCHEDULING
5595 /* On machines that have insn scheduling, we want all memory references to be
5596 explicit, so we need to deal with such paradoxical SUBREGs. */
5597 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
5598 && (GET_MODE_SIZE (GET_MODE (value))
5599 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5601 = simplify_gen_subreg (GET_MODE (value),
5602 force_reg (GET_MODE (SUBREG_REG (value)),
5603 force_operand (SUBREG_REG (value),
5605 GET_MODE (SUBREG_REG (value)),
5606 SUBREG_BYTE (value));
5612 /* Subroutine of expand_expr: return nonzero iff there is no way that
5613 EXP can reference X, which is being modified. TOP_P is nonzero if this
5614 call is going to be used to determine whether we need a temporary
5615 for EXP, as opposed to a recursive call to this function.
5617 It is always safe for this routine to return zero since it merely
5618 searches for optimization opportunities. */
5621 safe_from_p (rtx x, tree exp, int top_p)
5627 /* If EXP has varying size, we MUST use a target since we currently
5628 have no way of allocating temporaries of variable size
5629 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5630 So we assume here that something at a higher level has prevented a
5631 clash. This is somewhat bogus, but the best we can do. Only
5632 do this when X is BLKmode and when we are at the top level. */
5633 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5634 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5635 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5636 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5637 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5639 && GET_MODE (x) == BLKmode)
5640 /* If X is in the outgoing argument area, it is always safe. */
5642 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5643 || (GET_CODE (XEXP (x, 0)) == PLUS
5644 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5647 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5648 find the underlying pseudo. */
5649 if (GET_CODE (x) == SUBREG)
5652 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
5656 /* Now look at our tree code and possibly recurse. */
5657 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5660 exp_rtl = DECL_RTL_IF_SET (exp);
5667 if (TREE_CODE (exp) == TREE_LIST)
5671 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
5672 return 0;
5673 exp = TREE_CHAIN (exp);
5674 if (!exp)
5675 return 1;
5676 if (TREE_CODE (exp) != TREE_LIST)
5677 return safe_from_p (x, exp, 0);
5680 else if (TREE_CODE (exp) == ERROR_MARK)
5681 return 1; /* An already-visited SAVE_EXPR? */
5686 /* The only case we look at here is the DECL_INITIAL inside a
5688 return (TREE_CODE (exp) != DECL_EXPR
5689 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
5690 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
5691 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
5695 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
5696 return 0;
5700 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5704 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5705 the expression. If it is set, we conflict iff we are that rtx or
5706 both are in memory. Otherwise, we check all operands of the
5707 expression recursively. */
5709 switch (TREE_CODE (exp))
5712 /* If the operand is static or we are static, we can't conflict.
5713 Likewise if we don't conflict with the operand at all. */
5714 if (staticp (TREE_OPERAND (exp, 0))
5715 || TREE_STATIC (exp)
5716 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5717 return 1;
5719 /* Otherwise, the only way this can conflict is if we are taking
5720 the address of a DECL and that address is part of X, which is
5721 very rare.  */
5722 exp = TREE_OPERAND (exp, 0);
5725 if (!DECL_RTL_SET_P (exp)
5726 || !MEM_P (DECL_RTL (exp)))
5727 return 0;
5728 else
5729 exp_rtl = XEXP (DECL_RTL (exp), 0);
5733 case INDIRECT_REF:
5734 if (MEM_P (x)
5735 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5736 get_alias_set (exp)))
5737 return 0;
5741 /* Assume that the call will clobber all hard registers and
5742 all of memory.  */
5743 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
5744 || MEM_P (x))
5745 return 0;
5748 case WITH_CLEANUP_EXPR:
5749 case CLEANUP_POINT_EXPR:
5750 /* Lowered by gimplify.c.  */
5751 abort ();
5753 case SAVE_EXPR:
5754 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5757 /* The only operand we look at is operand 1. The rest aren't
5758 part of the expression. */
5759 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5765 /* If we have an rtx, we do not need to scan our operands.  */
5766 if (exp_rtl)
5767 break;
5769 nops = first_rtl_op (TREE_CODE (exp));
5770 for (i = 0; i < nops; i++)
5771 if (TREE_OPERAND (exp, i) != 0
5772 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5773 return 0;
5775 /* If this is a language-specific tree code, it may require
5776 special handling. */
5777 if ((unsigned int) TREE_CODE (exp)
5778 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5779 && !lang_hooks.safe_from_p (x, exp))
5780 return 0;
5783 /* If we have an rtl, find any enclosed object.  Then see if we conflict
5784 with it.  */
5787 if (GET_CODE (exp_rtl) == SUBREG)
5789 exp_rtl = SUBREG_REG (exp_rtl);
5790 if (REG_P (exp_rtl)
5791 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5792 return 0;
5795 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5796 are memory and they conflict. */
5797 return ! (rtx_equal_p (x, exp_rtl)
5798 || (MEM_P (x) && MEM_P (exp_rtl)
5799 && true_dependence (exp_rtl, VOIDmode, x,
5800 rtx_addr_varies_p)));
5803 /* If we reach here, it is safe. */
5807 /* Subroutine of expand_expr: return rtx if EXP is a
5808 variable or parameter; else return 0. */
5814 switch (TREE_CODE (exp))
5818 return DECL_RTL (exp);
5824 /* Return the highest power of two that EXP is known to be a multiple of.
5825 This is used in updating alignment of MEMs in array references. */
5827 static unsigned HOST_WIDE_INT
5828 highest_pow2_factor (tree exp)
5830 unsigned HOST_WIDE_INT c0, c1;
5832 switch (TREE_CODE (exp))
5835 /* We can find the lowest bit that's a one. If the low
5836 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
5837 We need to handle this case since we can find it in a COND_EXPR,
5838 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
5839 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
5840 later ICE.  */
5841 if (TREE_CONSTANT_OVERFLOW (exp))
5842 return BIGGEST_ALIGNMENT;
5845 /* Note: tree_low_cst is intentionally not used here,
5846 we don't care about the upper bits. */
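/* For illustration: C0 & -C0 isolates the lowest set bit in two's
   complement arithmetic; e.g. 24 (binary 11000) & -24 == 8, so an
   offset that is a multiple of 24 has 8 as its highest power-of-two
   factor.  */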
5847 c0 = TREE_INT_CST_LOW (exp);
5848 c0 &= -c0;
5849 return c0 ? c0 : BIGGEST_ALIGNMENT;
5853 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
5854 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5855 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5856 return MIN (c0, c1);
5858 case MULT_EXPR:
5859 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5860 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5861 return c0 * c1;
5863 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
5864 case CEIL_DIV_EXPR:
5865 if (integer_pow2p (TREE_OPERAND (exp, 1))
5866 && host_integerp (TREE_OPERAND (exp, 1), 1))
5868 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5869 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
5870 return MAX (1, c0 / c1);
5874 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
5875 case SAVE_EXPR:
5876 return highest_pow2_factor (TREE_OPERAND (exp, 0));
5878 case COMPOUND_EXPR:
5879 return highest_pow2_factor (TREE_OPERAND (exp, 1));
5881 case COND_EXPR:
5882 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5883 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
5884 return MIN (c0, c1);
5886 default:
5887 break;
5888 }
5890 return 1;
5893 /* Similar, except that the alignment requirements of TARGET are
5894 taken into account. Assume it is at least as aligned as its
5895 type, unless it is a COMPONENT_REF in which case the layout of
5896 the structure gives the alignment. */
5898 static unsigned HOST_WIDE_INT
5899 highest_pow2_factor_for_target (tree target, tree exp)
5901 unsigned HOST_WIDE_INT target_align, factor;
5903 factor = highest_pow2_factor (exp);
5904 if (TREE_CODE (target) == COMPONENT_REF)
5905 target_align = DECL_ALIGN (TREE_OPERAND (target, 1)) / BITS_PER_UNIT;
5907 target_align = TYPE_ALIGN (TREE_TYPE (target)) / BITS_PER_UNIT;
5908 return MAX (factor, target_align);
5911 /* Expands variable VAR. */
5914 expand_var (tree var)
5916 if (DECL_EXTERNAL (var))
5917 return;
5919 if (TREE_STATIC (var))
5920 /* If this is an inlined copy of a static local variable,
5921 look up the original decl. */
5922 var = DECL_ORIGIN (var);
5924 if (TREE_STATIC (var)
5925 ? !TREE_ASM_WRITTEN (var)
5926 : !DECL_RTL_SET_P (var))
5928 if (TREE_CODE (var) == VAR_DECL && DECL_DEFER_OUTPUT (var))
5930 /* Prepare a mem & address for the decl. */
5933 if (TREE_STATIC (var))
5934 abort ();
5936 x = gen_rtx_MEM (DECL_MODE (var),
5937 gen_reg_rtx (Pmode));
5939 set_mem_attributes (x, var, 1);
5940 SET_DECL_RTL (var, x);
5942 else if (lang_hooks.expand_decl (var))
5943 /* OK.  */;
5944 else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
5945 expand_decl (var);
5946 else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
5947 rest_of_decl_compilation (var, NULL, 0, 0);
5948 else if (TREE_CODE (var) == TYPE_DECL
5949 || TREE_CODE (var) == CONST_DECL
5950 || TREE_CODE (var) == FUNCTION_DECL
5951 || TREE_CODE (var) == LABEL_DECL)
5952 /* No expansion needed.  */;
5953 else
5954 abort ();
5958 /* Expands declarations of variables in list VARS. */
5961 expand_vars (tree vars)
5963 for (; vars; vars = TREE_CHAIN (vars))
5966 tree var = vars;
5967 if (DECL_EXTERNAL (var))
5968 continue;
5970 expand_var (var);
5971 expand_decl_init (var);
5975 /* Subroutine of expand_expr. Expand the two operands of a binary
5976 expression EXP0 and EXP1 placing the results in OP0 and OP1.
5977 The value may be stored in TARGET if TARGET is nonzero. The
5978 MODIFIER argument is as documented by expand_expr. */
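/* For illustration (an editorial sketch): when expanding A + A, the
   two operands are operand_equal_p, so the code below expands A only
   once and uses copy_rtx for the second operand.  */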
5981 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
5982 enum expand_modifier modifier)
5984 if (! safe_from_p (target, exp1, 1))
5985 target = 0;
5986 if (operand_equal_p (exp0, exp1, 0))
5988 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
5989 *op1 = copy_rtx (*op0);
5990 }
5991 else
5992 {
5993 /* If we need to preserve evaluation order, copy exp0 into its own
5994 temporary variable so that it can't be clobbered by exp1. */
5995 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
5996 exp0 = save_expr (exp0);
5997 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
5998 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6003 /* expand_expr: generate code for computing expression EXP.
6004 An rtx for the computed value is returned. The value is never null.
6005 In the case of a void EXP, const0_rtx is returned.
6007 The value may be stored in TARGET if TARGET is nonzero.
6008 TARGET is just a suggestion; callers must assume that
6009 the rtx returned may not be the same as TARGET.
6011 If TARGET is CONST0_RTX, it means that the value will be ignored.
6013 If TMODE is not VOIDmode, it suggests generating the
6014 result in mode TMODE. But this is done only when convenient.
6015 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6016 TMODE is just a suggestion; callers must assume that
6017 the rtx returned may not have mode TMODE.
6019 Note that TARGET may have neither TMODE nor MODE. In that case, it
6020 probably will not be used.
6022 If MODIFIER is EXPAND_SUM then when EXP is an addition
6023 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6024 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6025 products as above, or REG or MEM, or constant.
6026 Ordinarily in such cases we would output mul or add instructions
6027 and then return a pseudo reg containing the sum.
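   For example (an illustrative sketch): with EXPAND_SUM, expanding
   the address computation A + I*4 may return
   (plus (reg A) (mult (reg I) (const_int 4)))
   directly, leaving the caller to consume the composite address.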
6029 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6030 it also marks a label as absolutely required (it can't be dead).
6031 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6032 This is used for outputting expressions used in initializers.
6034 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6035 with a constant address even if that address is not normally legitimate.
6036 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6038 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6039 a call parameter. Such targets require special care as we haven't yet
6040 marked TARGET so that it's safe from being trashed by libcalls. We
6041 don't want to use TARGET for anything but the final result;
6042 intermediate values must go elsewhere.  Additionally, calls to
6043 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6045 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6046 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6047 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
6048 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
6049 recursively.  */
6051 static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
6052 enum expand_modifier, rtx *);
6055 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6056 enum expand_modifier modifier, rtx *alt_rtl)
6059 rtx ret, last = NULL;
6061 /* Handle ERROR_MARK before anybody tries to access its type. */
6062 if (TREE_CODE (exp) == ERROR_MARK
6063 || TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK)
6065 ret = CONST0_RTX (tmode);
6066 return ret ? ret : const0_rtx;
6069 if (flag_non_call_exceptions)
6071 rn = lookup_stmt_eh_region (exp);
6072 /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw. */
6073 if (rn >= 0)
6074 last = get_last_insn ();
6077 /* If this is an expression of some kind and it has an associated line
6078 number, then emit the line number before expanding the expression.
6080 We need to save and restore the file and line information so that
6081 errors discovered during expansion are emitted with the right
6082 information.  It would be better if the diagnostic routines
6083 used the file/line information embedded in the tree nodes rather
6084 than globals.  */
6085 if (cfun && EXPR_HAS_LOCATION (exp))
6087 location_t saved_location = input_location;
6088 input_location = EXPR_LOCATION (exp);
6089 emit_line_note (input_location);
6091 /* Record where the insns produced belong. */
6092 record_block_change (TREE_BLOCK (exp));
6094 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6096 input_location = saved_location;
6099 else
6100 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6103 /* If using non-call exceptions, mark all insns that may trap.
6104 expand_call() will mark CALL_INSNs before we get to this code,
6105 but it doesn't handle libcalls, and these may trap. */
6106 if (flag_non_call_exceptions && rn >= 0)
6107 {
6108 rtx insn;
6109 for (insn = next_real_insn (last); insn;
6110 insn = next_real_insn (insn))
6112 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
6113 /* If we want exceptions for non-call insns, any
6114 may_trap_p instruction may throw. */
6115 && GET_CODE (PATTERN (insn)) != CLOBBER
6116 && GET_CODE (PATTERN (insn)) != USE
6117 && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
6119 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
6120 REG_NOTES (insn));
6124 return ret;
6128 static rtx
6129 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
6130 enum expand_modifier modifier, rtx *alt_rtl)
6133 tree type = TREE_TYPE (exp);
6135 enum machine_mode mode;
6136 enum tree_code code = TREE_CODE (exp);
6138 rtx subtarget, original_target;
6141 bool reduce_bit_field = false;
6142 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field && !ignore \
6143 ? reduce_to_bit_field_precision ((expr), \
6144 target, \
6145 type) \
6146 : (expr))
6148 mode = TYPE_MODE (type);
6149 unsignedp = TYPE_UNSIGNED (type);
6150 if (lang_hooks.reduce_bit_field_operations
6151 && TREE_CODE (type) == INTEGER_TYPE
6152 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type))
6154 /* An operation in what may be a bit-field type needs the
6155 result to be reduced to the precision of the bit-field type,
6156 which is narrower than that of the type's mode. */
6157 reduce_bit_field = true;
6158 if (modifier == EXPAND_STACK_PARM)
6159 target = 0;
6162 /* Use subtarget as the target for operand 0 of a binary operation. */
6163 subtarget = get_subtarget (target);
6164 original_target = target;
6165 ignore = (target == const0_rtx
6166 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6167 || code == CONVERT_EXPR || code == COND_EXPR
6168 || code == VIEW_CONVERT_EXPR)
6169 && TREE_CODE (type) == VOID_TYPE));
6171 /* If we are going to ignore this result, we need only do something
6172 if there is a side-effect somewhere in the expression. If there
6173 is, short-circuit the most common cases here. Note that we must
6174 not call expand_expr with anything but const0_rtx in case this
6175 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
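  /* E.g. for "(void) (x + foo ())" only the operands are expanded for
     their side effects; the call to foo is emitted but the addition
     itself is never performed.  */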
6179 if (! TREE_SIDE_EFFECTS (exp))
6182 /* Ensure we reference a volatile object even if value is ignored, but
6183 don't do this if all we are doing is taking its address. */
6184 if (TREE_THIS_VOLATILE (exp)
6185 && TREE_CODE (exp) != FUNCTION_DECL
6186 && mode != VOIDmode && mode != BLKmode
6187 && modifier != EXPAND_CONST_ADDRESS)
6189 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6191 temp = copy_to_reg (temp);
6195 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6196 || code == INDIRECT_REF)
6197 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6200 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6201 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6203 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6204 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6207 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6208 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
	/* If the second operand has no side effects, just evaluate
	   the first.  */
6211 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6213 else if (code == BIT_FIELD_REF)
6215 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6216 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6217 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
  /* If we will do cse, generate all results into pseudo registers
     since 1) that allows cse to find more things
     and 2) otherwise cse could produce an insn the machine
     cannot support.  An exception is a CONSTRUCTOR into a multi-word
     MEM: storing directly into the MEM is much more likely to be
     the most efficient.  Another is a CALL_EXPR which must return in
     memory.  */
6231 if (! cse_not_expected && mode != BLKmode && target
6232 && (!REG_P (target) || REGNO (target) < FIRST_PSEUDO_REGISTER)
6233 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
6234 && ! (code == CALL_EXPR && aggregate_value_p (exp, exp)))
6241 tree function = decl_function_context (exp);
6243 temp = label_rtx (exp);
6244 temp = gen_rtx_LABEL_REF (Pmode, temp);
6246 if (function != current_function_decl
6248 LABEL_REF_NONLOCAL_P (temp) = 1;
6250 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
6255 if (!DECL_RTL_SET_P (exp))
6257 error ("%Jprior parameter's size depends on '%D'", exp, exp);
6258 return CONST0_RTX (mode);
6261 /* ... fall through ... */
6264 /* If a static var's type was incomplete when the decl was written,
6265 but the type is complete now, lay out the decl now. */
6266 if (DECL_SIZE (exp) == 0
6267 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6268 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6269 layout_decl (exp, 0);
6271 /* ... fall through ... */
6275 if (DECL_RTL (exp) == 0)
	  /* Ensure the variable is marked as used even if it doesn't go
	     through a parser.  If it hasn't been used yet, write out an
	     external definition.  */
6281 if (! TREE_USED (exp))
6283 assemble_external (exp);
6284 TREE_USED (exp) = 1;
6287 /* Show we haven't gotten RTL for this yet. */
6290 /* Handle variables inherited from containing functions. */
6291 context = decl_function_context (exp);
6293 if (context != 0 && context != current_function_decl
6294 /* If var is static, we don't need a static chain to access it. */
6295 && ! (MEM_P (DECL_RTL (exp))
6296 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6300 /* Mark as non-local and addressable. */
6301 DECL_NONLOCAL (exp) = 1;
6302 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6304 lang_hooks.mark_addressable (exp);
6305 if (!MEM_P (DECL_RTL (exp)))
6307 addr = XEXP (DECL_RTL (exp), 0);
6310 = replace_equiv_address (addr,
6311 fix_lexical_addr (XEXP (addr, 0), exp));
6313 addr = fix_lexical_addr (addr, exp);
6315 temp = replace_equiv_address (DECL_RTL (exp), addr);
6318 /* This is the case of an array whose size is to be determined
	     from its initializer, while the initializer is still being
	     parsed.  See expand_decl.  */
6322 else if (MEM_P (DECL_RTL (exp))
6323 && REG_P (XEXP (DECL_RTL (exp), 0)))
6324 temp = validize_mem (DECL_RTL (exp));
      /* If DECL_RTL is memory, we are in the normal case: if the address
	 is not valid, or it is not a register and -fforce-addr is
	 specified, get the address into a register.  */
6330 else if (MEM_P (DECL_RTL (exp))
6331 && modifier != EXPAND_CONST_ADDRESS
6332 && modifier != EXPAND_SUM
6333 && modifier != EXPAND_INITIALIZER
6334 && (! memory_address_p (DECL_MODE (exp),
6335 XEXP (DECL_RTL (exp), 0))
6337 && !REG_P (XEXP (DECL_RTL (exp), 0)))))
6340 *alt_rtl = DECL_RTL (exp);
6341 temp = replace_equiv_address (DECL_RTL (exp),
6342 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6345 /* If we got something, return it. But first, set the alignment
6346 if the address is a register. */
6349 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
6350 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6355 /* If the mode of DECL_RTL does not match that of the decl, it
6356 must be a promoted value. We return a SUBREG of the wanted mode,
6357 but mark it so that we know that it was already extended. */
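      /* This happens on targets whose PROMOTE_MODE widens sub-word
	 variables: e.g. a char variable may live in an SImode pseudo,
	 and we hand back a QImode SUBREG of it with
	 SUBREG_PROMOTED_VAR_P set so redundant extensions can be
	 elided later.  */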
6359 if (REG_P (DECL_RTL (exp))
6360 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6362 /* Get the signedness used for this variable. Ensure we get the
6363 same mode we got when the variable was declared. */
6364 if (GET_MODE (DECL_RTL (exp))
6365 != promote_mode (type, DECL_MODE (exp), &unsignedp,
6366 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6369 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6370 SUBREG_PROMOTED_VAR_P (temp) = 1;
6371 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6375 return DECL_RTL (exp);
6378 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6379 TREE_INT_CST_HIGH (exp), mode);
6381 /* ??? If overflow is set, fold will have done an incomplete job,
6382 which can result in (plus xx (const_int 0)), which can get
6383 simplified by validate_replace_rtx during virtual register
6384 instantiation, which can result in unrecognizable insns.
6385 Avoid this by forcing all overflows into registers. */
6386 if (TREE_CONSTANT_OVERFLOW (exp)
6387 && modifier != EXPAND_INITIALIZER)
6388 temp = force_reg (mode, temp);
6393 return const_vector_from_tree (exp);
6396 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6399 /* If optimized, generate immediate CONST_DOUBLE
6400 which will be turned into memory by reload if necessary.
6402 We used to force a register so that loop.c could see it. But
6403 this does not allow gen_* patterns to perform optimizations with
6404 the constants. It also produces two insns in cases like "x = 1.0;".
6405 On most machines, floating-point constants are not permitted in
6406 many insns, so we'd end up copying it to a register in any case.
6408 Now, we do the copying in expand_binop, if appropriate. */
6409 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6410 TYPE_MODE (TREE_TYPE (exp)));
6413 /* Handle evaluating a complex constant in a CONCAT target. */
6414 if (original_target && GET_CODE (original_target) == CONCAT)
6416 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6419 rtarg = XEXP (original_target, 0);
6420 itarg = XEXP (original_target, 1);
6422 /* Move the real and imaginary parts separately. */
6423 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
6424 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
6427 emit_move_insn (rtarg, op0);
6429 emit_move_insn (itarg, op1);
6431 return original_target;
6434 /* ... fall through ... */
6437 temp = output_constant_def (exp, 1);
6439 /* temp contains a constant address.
6440 On RISC machines where a constant address isn't valid,
6441 make some insns to get that address into a register. */
6442 if (modifier != EXPAND_CONST_ADDRESS
6443 && modifier != EXPAND_INITIALIZER
6444 && modifier != EXPAND_SUM
6445 && (! memory_address_p (mode, XEXP (temp, 0))
6446 || flag_force_addr))
6447 return replace_equiv_address (temp,
6448 copy_rtx (XEXP (temp, 0)));
6453 tree val = TREE_OPERAND (exp, 0);
6454 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
6456 if (TREE_CODE (val) != VAR_DECL || !DECL_ARTIFICIAL (val))
6458 /* We can indeed still hit this case, typically via builtin
6459 expanders calling save_expr immediately before expanding
6460 something. Assume this means that we only have to deal
6461 with non-BLKmode values. */
6462 if (GET_MODE (ret) == BLKmode)
6465 val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
6466 DECL_ARTIFICIAL (val) = 1;
6467 TREE_OPERAND (exp, 0) = val;
6469 if (!CONSTANT_P (ret))
6470 ret = copy_to_reg (ret);
6471 SET_DECL_RTL (val, ret);
6480 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6481 TREE_OPERAND (exp, 0)
6482 = lang_hooks.unsave_expr_now (TREE_OPERAND (exp, 0));
6487 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6488 expand_goto (TREE_OPERAND (exp, 0));
6490 expand_computed_goto (TREE_OPERAND (exp, 0));
      /* These are lowered during gimplification, so we should never ever
	 see them here.  */
6499 case LABELED_BLOCK_EXPR:
6500 if (LABELED_BLOCK_BODY (exp))
6501 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6502 /* Should perhaps use expand_label, but this is simpler and safer. */
6503 do_pending_stack_adjust ();
6504 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6507 case EXIT_BLOCK_EXPR:
6508 if (EXIT_BLOCK_RETURN (exp))
6509 sorry ("returned value in block_exit_expr");
6510 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6515 tree block = BIND_EXPR_BLOCK (exp);
6518 /* If we're in functions-as-trees mode, this BIND_EXPR represents
6519 the block, so we need to emit NOTE_INSN_BLOCK_* notes. */
6520 mark_ends = (block != NULL_TREE);
6521 expand_start_bindings_and_block (mark_ends ? 0 : 2, block);
6523 /* If VARS have not yet been expanded, expand them now. */
6524 expand_vars (BIND_EXPR_VARS (exp));
	/* TARGET was clobbered early in this function.  The correct
	   indicator of whether or not we need the value of this
	   expression is the IGNORE variable.  */
6529 temp = expand_expr (BIND_EXPR_BODY (exp),
6530 ignore ? const0_rtx : target,
6533 expand_end_bindings (BIND_EXPR_VARS (exp), mark_ends, 0);
      /* If we don't need the result, just ensure we evaluate any
	 subexpressions.  */
6545 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6546 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6551 /* All elts simple constants => refer to a constant in memory. But
6552 if this is a non-BLKmode mode, let it store a field at a time
6553 since that should make a CONST_INT or CONST_DOUBLE when we
6554 fold. Likewise, if we have a target we can use, it is best to
6555 store directly into the target unless the type is large enough
6556 that memcpy will be used. If we are making an initializer and
6557 all operands are constant, put it in memory as well.
6559 FIXME: Avoid trying to fill vector constructors piece-meal.
6560 Output them with output_constant_def below unless we're sure
6561 they're zeros. This should go away when vector initializers
	 are treated like VECTOR_CST instead of arrays.  */
6564 else if ((TREE_STATIC (exp)
6565 && ((mode == BLKmode
6566 && ! (target != 0 && safe_from_p (target, exp, 1)))
6567 || TREE_ADDRESSABLE (exp)
6568 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6569 && (! MOVE_BY_PIECES_P
6570 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6572 && ! mostly_zeros_p (exp))))
6573 || ((modifier == EXPAND_INITIALIZER
6574 || modifier == EXPAND_CONST_ADDRESS)
6575 && TREE_CONSTANT (exp)))
6577 rtx constructor = output_constant_def (exp, 1);
6579 if (modifier != EXPAND_CONST_ADDRESS
6580 && modifier != EXPAND_INITIALIZER
6581 && modifier != EXPAND_SUM)
6582 constructor = validize_mem (constructor);
6588 /* Handle calls that pass values in multiple non-contiguous
6589 locations. The Irix 6 ABI has examples of this. */
6590 if (target == 0 || ! safe_from_p (target, exp, 1)
6591 || GET_CODE (target) == PARALLEL
6592 || modifier == EXPAND_STACK_PARM)
6594 = assign_temp (build_qualified_type (type,
6596 | (TREE_READONLY (exp)
6597 * TYPE_QUAL_CONST))),
6598 0, TREE_ADDRESSABLE (exp), 1);
6600 store_constructor (exp, target, 0, int_expr_size (exp));
6606 tree exp1 = TREE_OPERAND (exp, 0);
6608 if (modifier != EXPAND_WRITE)
6612 t = fold_read_from_constant_string (exp);
6614 return expand_expr (t, target, tmode, modifier);
6617 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6618 op0 = memory_address (mode, op0);
6619 temp = gen_rtx_MEM (mode, op0);
6620 set_mem_attributes (temp, exp, 0);
6622 /* If we are writing to this object and its type is a record with
6623 readonly fields, we must mark it as readonly so it will
6624 conflict with readonly references to those fields. */
6625 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
6626 RTX_UNCHANGING_P (temp) = 1;
6633 #ifdef ENABLE_CHECKING
6634 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6639 tree array = TREE_OPERAND (exp, 0);
6640 tree low_bound = array_ref_low_bound (exp);
6641 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6644 /* Optimize the special-case of a zero lower bound.
6646 We convert the low_bound to sizetype to avoid some problems
6647 with constant folding. (E.g. suppose the lower bound is 1,
6648 and its mode is QI. Without the conversion, (ARRAY
6649 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6650 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6652 if (! integer_zerop (low_bound))
6653 index = size_diffop (index, convert (sizetype, low_bound));
6655 /* Fold an expression like: "foo"[2].
6656 This is not done in fold so it won't happen inside &.
6657 Don't fold if this is for wide characters since it's too
6658 difficult to do correctly and this is a very rare case. */
6660 if (modifier != EXPAND_CONST_ADDRESS
6661 && modifier != EXPAND_INITIALIZER
6662 && modifier != EXPAND_MEMORY)
6664 tree t = fold_read_from_constant_string (exp);
6667 return expand_expr (t, target, tmode, modifier);
6670 /* If this is a constant index into a constant array,
6671 just get the value from the array. Handle both the cases when
6672 we have an explicit constructor and when our operand is a variable
6673 that was declared const. */
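	/* E.g. given "static const int tbl[3] = {10, 20, 30};", a use
	   of tbl[1] can be replaced outright by the constant 20 when
	   tbl is known to bind locally.  */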
6675 if (modifier != EXPAND_CONST_ADDRESS
6676 && modifier != EXPAND_INITIALIZER
6677 && modifier != EXPAND_MEMORY
6678 && TREE_CODE (array) == CONSTRUCTOR
6679 && ! TREE_SIDE_EFFECTS (array)
6680 && TREE_CODE (index) == INTEGER_CST
6681 && 0 > compare_tree_int (index,
6682 list_length (CONSTRUCTOR_ELTS
6683 (TREE_OPERAND (exp, 0)))))
6687 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6688 i = TREE_INT_CST_LOW (index);
6689 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6693 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
6697 else if (optimize >= 1
6698 && modifier != EXPAND_CONST_ADDRESS
6699 && modifier != EXPAND_INITIALIZER
6700 && modifier != EXPAND_MEMORY
6701 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6702 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6703 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
6704 && targetm.binds_local_p (array))
6706 if (TREE_CODE (index) == INTEGER_CST)
6708 tree init = DECL_INITIAL (array);
6710 if (TREE_CODE (init) == CONSTRUCTOR)
6714 for (elem = CONSTRUCTOR_ELTS (init);
6716 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6717 elem = TREE_CHAIN (elem))
6720 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6721 return expand_expr (fold (TREE_VALUE (elem)), target,
6724 else if (TREE_CODE (init) == STRING_CST
6725 && 0 > compare_tree_int (index,
6726 TREE_STRING_LENGTH (init)))
6728 tree type = TREE_TYPE (TREE_TYPE (init));
6729 enum machine_mode mode = TYPE_MODE (type);
6731 if (GET_MODE_CLASS (mode) == MODE_INT
6732 && GET_MODE_SIZE (mode) == 1)
6733 return gen_int_mode (TREE_STRING_POINTER (init)
6734 [TREE_INT_CST_LOW (index)], mode);
6739 goto normal_inner_ref;
6742 /* If the operand is a CONSTRUCTOR, we can just extract the
6743 appropriate field if it is present. */
6744 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
6748 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6749 elt = TREE_CHAIN (elt))
6750 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6751 /* We can normally use the value of the field in the
6752 CONSTRUCTOR. However, if this is a bitfield in
6753 an integral mode that we can fit in a HOST_WIDE_INT,
6754 we must mask only the number of bits in the bitfield,
6755 since this is done implicitly by the constructor. If
6756 the bitfield does not meet either of those conditions,
6757 we can't do this optimization. */
6758 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6759 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6761 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6762 <= HOST_BITS_PER_WIDE_INT))))
6764 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
6765 && modifier == EXPAND_STACK_PARM)
6767 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6768 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6770 HOST_WIDE_INT bitsize
6771 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6772 enum machine_mode imode
6773 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6775 if (TYPE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6777 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6778 op0 = expand_and (imode, op0, op1, target);
6783 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6786 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6788 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6796 goto normal_inner_ref;
6799 case ARRAY_RANGE_REF:
6802 enum machine_mode mode1;
6803 HOST_WIDE_INT bitsize, bitpos;
6806 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6807 &mode1, &unsignedp, &volatilep);
6810 /* If we got back the original object, something is wrong. Perhaps
6811 we are evaluating an expression too early. In any event, don't
6812 infinitely recurse. */
      /* If TEM's type is a union of variable size, pass TARGET to the inner
	 computation, since it will need a temporary and TARGET is known
	 to be suitable.  This occurs with unchecked conversion in Ada.  */
6822 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6823 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6825 && modifier != EXPAND_STACK_PARM
6826 ? target : NULL_RTX),
6828 (modifier == EXPAND_INITIALIZER
6829 || modifier == EXPAND_CONST_ADDRESS
6830 || modifier == EXPAND_STACK_PARM)
6831 ? modifier : EXPAND_NORMAL);
      /* If this is a constant, put it into a register if it is a
	 legitimate constant and OFFSET is 0; put it into memory if it
	 isn't.  */
6835 if (CONSTANT_P (op0))
6837 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6838 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6840 op0 = force_reg (mode, op0);
6842 op0 = validize_mem (force_const_mem (mode, op0));
      /* Otherwise, if this object is not in memory and we either have an
6846 offset or a BLKmode result, put it there. This case can't occur in
6847 C, but can in Ada if we have unchecked conversion of an expression
6848 from a scalar type to an array or record type or for an
6849 ARRAY_RANGE_REF whose type is BLKmode. */
6850 else if (!MEM_P (op0)
6852 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
6854 tree nt = build_qualified_type (TREE_TYPE (tem),
6855 (TYPE_QUALS (TREE_TYPE (tem))
6856 | TYPE_QUAL_CONST));
6857 rtx memloc = assign_temp (nt, 1, 1, 1);
6859 emit_move_insn (memloc, op0);
6865 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
6871 #ifdef POINTERS_EXTEND_UNSIGNED
6872 if (GET_MODE (offset_rtx) != Pmode)
6873 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
6875 if (GET_MODE (offset_rtx) != ptr_mode)
6876 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6879 if (GET_MODE (op0) == BLKmode
	      /* A constant address in OP0 can have VOIDmode; we must
		 not try to call force_reg in that case.  */
6882 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6884 && (bitpos % bitsize) == 0
6885 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6886 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
6888 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
6892 op0 = offset_address (op0, offset_rtx,
6893 highest_pow2_factor (offset));
6896 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
6897 record its alignment as BIGGEST_ALIGNMENT. */
6898 if (MEM_P (op0) && bitpos == 0 && offset != 0
6899 && is_aligning_offset (offset, tem))
6900 set_mem_align (op0, BIGGEST_ALIGNMENT);
6902 /* Don't forget about volatility even if this is a bitfield. */
6903 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
6905 if (op0 == orig_op0)
6906 op0 = copy_rtx (op0);
6908 MEM_VOLATILE_P (op0) = 1;
      /* The following code doesn't handle CONCAT.
	 Assume only bitpos == 0 can be used for CONCAT, due to
	 one-element arrays having the same mode as their element.  */
6914 if (GET_CODE (op0) == CONCAT)
6916 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
6921 /* In cases where an aligned union has an unaligned object
6922 as a field, we might be extracting a BLKmode value from
6923 an integer-mode (e.g., SImode) object. Handle this case
6924 by doing the extract into an object as wide as the field
6925 (which we know to be the width of a basic mode), then
6926 storing into memory, and changing the mode to BLKmode. */
6927 if (mode1 == VOIDmode
6928 || REG_P (op0) || GET_CODE (op0) == SUBREG
6929 || (mode1 != BLKmode && ! direct_load[(int) mode1]
6930 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6931 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
6932 && modifier != EXPAND_CONST_ADDRESS
6933 && modifier != EXPAND_INITIALIZER)
6934 /* If the field isn't aligned enough to fetch as a memref,
6935 fetch it as a bit field. */
6936 || (mode1 != BLKmode
6937 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
6938 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
6940 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
6941 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
6942 && ((modifier == EXPAND_CONST_ADDRESS
6943 || modifier == EXPAND_INITIALIZER)
6945 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
6946 || (bitpos % BITS_PER_UNIT != 0)))
6947 /* If the type and the field are a constant size and the
6948 size of the type isn't the same size as the bitfield,
6949 we must use bitfield operations. */
6951 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
6953 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
6956 enum machine_mode ext_mode = mode;
6958 if (ext_mode == BLKmode
6959 && ! (target != 0 && MEM_P (op0)
6961 && bitpos % BITS_PER_UNIT == 0))
6962 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
6964 if (ext_mode == BLKmode)
6967 target = assign_temp (type, 0, 1, 1);
6972 /* In this case, BITPOS must start at a byte boundary and
6973 TARGET, if specified, must be a MEM. */
6975 || (target != 0 && !MEM_P (target))
6976 || bitpos % BITS_PER_UNIT != 0)
6979 emit_block_move (target,
6980 adjust_address (op0, VOIDmode,
6981 bitpos / BITS_PER_UNIT),
6982 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6984 (modifier == EXPAND_STACK_PARM
6985 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
6990 op0 = validize_mem (op0);
6992 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
6993 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
6995 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
6996 (modifier == EXPAND_STACK_PARM
6997 ? NULL_RTX : target),
6998 ext_mode, ext_mode);
7000 /* If the result is a record type and BITSIZE is narrower than
7001 the mode of OP0, an integral mode, and this is a big endian
7002 machine, we must put the field into the high-order bits. */
7003 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7004 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7005 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7006 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7007 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7011 /* If the result type is BLKmode, store the data into a temporary
7012 of the appropriate type, but with the mode corresponding to the
7013 mode for the data we have (op0's mode). It's tempting to make
7014 this a constant type, since we know it's only being stored once,
7015 but that can cause problems if we are taking the address of this
7016 COMPONENT_REF because the MEM of any reference via that address
7017 will have flags corresponding to the type, which will not
7018 necessarily be constant. */
7019 if (mode == BLKmode)
7022 = assign_stack_temp_for_type
7023 (ext_mode, GET_MODE_BITSIZE (ext_mode), 0, type);
7025 emit_move_insn (new, op0);
7026 op0 = copy_rtx (new);
7027 PUT_MODE (op0, BLKmode);
7028 set_mem_attributes (op0, exp, 1);
      /* If the result is BLKmode, use that to access the object
	 now as well.  */
7036 if (mode == BLKmode)
7039 /* Get a reference to just this component. */
7040 if (modifier == EXPAND_CONST_ADDRESS
7041 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7042 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7044 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7046 if (op0 == orig_op0)
7047 op0 = copy_rtx (op0);
7049 set_mem_attributes (op0, exp, 0);
7050 if (REG_P (XEXP (op0, 0)))
7051 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7053 MEM_VOLATILE_P (op0) |= volatilep;
7054 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7055 || modifier == EXPAND_CONST_ADDRESS
7056 || modifier == EXPAND_INITIALIZER)
7058 else if (target == 0)
7059 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7061 convert_move (target, op0, unsignedp);
7066 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
7069 /* Check for a built-in function. */
7070 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7071 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7073 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7075 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7076 == BUILT_IN_FRONTEND)
7077 return lang_hooks.expand_expr (exp, original_target,
7081 return expand_builtin (exp, target, subtarget, tmode, ignore);
7084 return expand_call (exp, target, ignore);
7086 case NON_LVALUE_EXPR:
7089 if (TREE_OPERAND (exp, 0) == error_mark_node)
7092 if (TREE_CODE (type) == UNION_TYPE)
7094 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
	  /* If both input and output are BLKmode, this conversion isn't
	     doing anything except possibly changing memory attributes.  */
7098 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7100 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7103 result = copy_rtx (result);
7104 set_mem_attributes (result, exp, 0);
7110 if (TYPE_MODE (type) != BLKmode)
7111 target = gen_reg_rtx (TYPE_MODE (type));
7113 target = assign_temp (type, 0, 1, 1);
7117 /* Store data into beginning of memory target. */
7118 store_expr (TREE_OPERAND (exp, 0),
7119 adjust_address (target, TYPE_MODE (valtype), 0),
7120 modifier == EXPAND_STACK_PARM ? 2 : 0);
7122 else if (REG_P (target))
7123 /* Store this field into a union of the proper type. */
7124 store_field (target,
7125 MIN ((int_size_in_bytes (TREE_TYPE
7126 (TREE_OPERAND (exp, 0)))
7128 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7129 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7130 VOIDmode, 0, type, 0);
7134 /* Return the entire union. */
7138 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7140 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7143 /* If the signedness of the conversion differs and OP0 is
7144 a promoted SUBREG, clear that indication since we now
7145 have to do the proper extension. */
7146 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7147 && GET_CODE (op0) == SUBREG)
7148 SUBREG_PROMOTED_VAR_P (op0) = 0;
7150 return REDUCE_BIT_FIELD (op0);
7153 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7154 op0 = REDUCE_BIT_FIELD (op0);
7155 if (GET_MODE (op0) == mode)
7158 /* If OP0 is a constant, just convert it into the proper mode. */
7159 if (CONSTANT_P (op0))
7161 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7162 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7164 if (modifier == EXPAND_INITIALIZER)
7165 return simplify_gen_subreg (mode, op0, inner_mode,
7166 subreg_lowpart_offset (mode,
7169 return convert_modes (mode, inner_mode, op0,
7170 TYPE_UNSIGNED (inner_type));
7173 if (modifier == EXPAND_INITIALIZER)
7174 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7178 convert_to_mode (mode, op0,
7179 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7181 convert_move (target, op0,
7182 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7185 case VIEW_CONVERT_EXPR:
7186 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7188 /* If the input and output modes are both the same, we are done.
7189 Otherwise, if neither mode is BLKmode and both are integral and within
7190 a word, we can use gen_lowpart. If neither is true, make sure the
7191 operand is in memory and convert the MEM to the new mode. */
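      /* E.g. a VIEW_CONVERT_EXPR between float and a 32-bit integer merely
	 reinterprets the same 32 bits, so a lowpart SUBREG of the operand
	 typically suffices and no conversion insns are needed.  */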
7192 if (TYPE_MODE (type) == GET_MODE (op0))
7194 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7195 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7196 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
7197 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7198 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7199 op0 = gen_lowpart (TYPE_MODE (type), op0);
7200 else if (!MEM_P (op0))
	  /* If the operand is not a MEM, force it into memory.  Since we
	     are going to be changing the mode of the MEM, don't call
	     force_const_mem for constants because we don't allow pool
	     constants to change mode.  */
7206 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7208 if (TREE_ADDRESSABLE (exp))
7211 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7213 = assign_stack_temp_for_type
7214 (TYPE_MODE (inner_type),
7215 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7217 emit_move_insn (target, op0);
      /* At this point, OP0 is in the correct mode.  If the output type is
	 such that the operand is known to be aligned, indicate that it is.
	 Otherwise, we need only be concerned about alignment for non-BLKmode
	 results.  */
7227 op0 = copy_rtx (op0);
7229 if (TYPE_ALIGN_OK (type))
7230 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7231 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7232 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7234 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7235 HOST_WIDE_INT temp_size
7236 = MAX (int_size_in_bytes (inner_type),
7237 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7238 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7239 temp_size, 0, type);
7240 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7242 if (TREE_ADDRESSABLE (exp))
7245 if (GET_MODE (op0) == BLKmode)
7246 emit_block_move (new_with_op0_mode, op0,
7247 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7248 (modifier == EXPAND_STACK_PARM
7249 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7251 emit_move_insn (new_with_op0_mode, op0);
7256 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7262 this_optab = ! unsignedp && flag_trapv
7263 && (GET_MODE_CLASS (mode) == MODE_INT)
7264 ? addv_optab : add_optab;
7266 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
7267 something else, make sure we add the register to the constant and
7268 then to the other thing. This case can occur during strength
7269 reduction and doing it this way will produce better code if the
7270 frame pointer or argument pointer is eliminated.
7272 fold-const.c will ensure that the constant is always in the inner
7273 PLUS_EXPR, so the only case we need to do anything about is if
7274 sp, ap, or fp is our second argument, in which case we must swap
7275 the innermost first argument and our second argument. */
7277 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7278 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7279 && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
7280 && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7281 || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7282 || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7284 tree t = TREE_OPERAND (exp, 1);
7286 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7287 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7290 /* If the result is to be ptr_mode and we are adding an integer to
7291 something, we might be forming a constant. So try to use
7292 plus_constant. If it produces a sum and we can't accept it,
7293 use force_operand. This allows P = &ARR[const] to generate
	 efficient code on machines where a SYMBOL_REF is not a valid
	 address.
7297 If this is an EXPAND_SUM call, always return the sum. */
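      /* E.g. with 4-byte elements, expanding &arr[3] under EXPAND_SUM may
	 yield (const (plus (symbol_ref "arr") (const_int 12))), which the
	 caller can fold into a single addressing mode.  */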
7298 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7299 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7301 if (modifier == EXPAND_STACK_PARM)
7303 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7304 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7305 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7309 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7311 /* Use immed_double_const to ensure that the constant is
7312 truncated according to the mode of OP1, then sign extended
7313 to a HOST_WIDE_INT. Using the constant directly can result
7314 in non-canonical RTL in a 64x32 cross compile. */
7316 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7318 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7319 op1 = plus_constant (op1, INTVAL (constant_part));
7320 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7321 op1 = force_operand (op1, target);
7322 return REDUCE_BIT_FIELD (op1);
7325 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7326 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7327 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7331 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7332 (modifier == EXPAND_INITIALIZER
7333 ? EXPAND_INITIALIZER : EXPAND_SUM));
7334 if (! CONSTANT_P (op0))
7336 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7337 VOIDmode, modifier);
7338 /* Return a PLUS if modifier says it's OK. */
7339 if (modifier == EXPAND_SUM
7340 || modifier == EXPAND_INITIALIZER)
7341 return simplify_gen_binary (PLUS, mode, op0, op1);
	  /* Use immed_double_const to ensure that the constant is
	     truncated according to the mode of OP0, then sign extended
	     to a HOST_WIDE_INT.  Using the constant directly can result
	     in non-canonical RTL in a 64x32 cross compile.  */
7349 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7351 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7352 op0 = plus_constant (op0, INTVAL (constant_part));
7353 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7354 op0 = force_operand (op0, target);
7355 return REDUCE_BIT_FIELD (op0);
      /* No sense saving up arithmetic to be done
	 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
7363 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7364 || mode != ptr_mode)
7366 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7367 subtarget, &op0, &op1, 0);
7368 if (op0 == const0_rtx)
7370 if (op1 == const0_rtx)
7375 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7376 subtarget, &op0, &op1, modifier);
7377 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
      /* For initializers, we are allowed to return a MINUS of two
	 symbolic constants; here we handle all cases when both operands
	 are constant, for the sake of an initializer.  */
7385 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7386 && really_constant_p (TREE_OPERAND (exp, 0))
7387 && really_constant_p (TREE_OPERAND (exp, 1)))
7389 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7390 NULL_RTX, &op0, &op1, modifier);
7392 /* If the last operand is a CONST_INT, use plus_constant of
7393 the negated constant. Else make the MINUS. */
7394 if (GET_CODE (op1) == CONST_INT)
7395 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
7397 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
7400 this_optab = ! unsignedp && flag_trapv
7401 && (GET_MODE_CLASS(mode) == MODE_INT)
7402 ? subv_optab : sub_optab;
      /* No sense saving up arithmetic to be done
	 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
7408 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7409 || mode != ptr_mode)
7412 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7413 subtarget, &op0, &op1, modifier);
7415 /* Convert A - const to A + (-const). */
7416 if (GET_CODE (op1) == CONST_INT)
7418 op1 = negate_rtx (mode, op1);
7419 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7425 /* If first operand is constant, swap them.
7426 Thus the following special case checks need only
7427 check the second operand. */
7428 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7430 tree t1 = TREE_OPERAND (exp, 0);
7431 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7432 TREE_OPERAND (exp, 1) = t1;
7435 /* Attempt to return something suitable for generating an
7436 indexed address, for machines that support that. */
7438 if (modifier == EXPAND_SUM && mode == ptr_mode
7439 && host_integerp (TREE_OPERAND (exp, 1), 0))
7441 tree exp1 = TREE_OPERAND (exp, 1);
7443 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7447 op0 = force_operand (op0, NULL_RTX);
7449 op0 = copy_to_mode_reg (mode, op0);
7451 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
7452 gen_int_mode (tree_low_cst (exp1, 0),
7453 TYPE_MODE (TREE_TYPE (exp1)))));
7456 if (modifier == EXPAND_STACK_PARM)
7459 /* Check for multiplying things that have been extended
7460 from a narrower type. If this machine supports multiplying
7461 in that narrower type with a result in the desired type,
7462 do it that way, and avoid the explicit type-conversion. */
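      /* E.g. on a 32-bit target, "(long long) i * (long long) j" with
	 int operands can be emitted as a single widening multiply
	 (a mulsidi3 pattern, if the target provides one) instead of two
	 extensions followed by a full DImode multiply.  */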
7463 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7464 && TREE_CODE (type) == INTEGER_TYPE
7465 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7466 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7467 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7468 && int_fits_type_p (TREE_OPERAND (exp, 1),
7469 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7470 /* Don't use a widening multiply if a shift will do. */
7471 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7472 > HOST_BITS_PER_WIDE_INT)
7473 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7475 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7476 && (TYPE_PRECISION (TREE_TYPE
7477 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7478 == TYPE_PRECISION (TREE_TYPE
7480 (TREE_OPERAND (exp, 0), 0))))
7481 /* If both operands are extended, they must either both
7482 be zero-extended or both be sign-extended. */
7483 && (TYPE_UNSIGNED (TREE_TYPE
7484 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7485 == TYPE_UNSIGNED (TREE_TYPE
7487 (TREE_OPERAND (exp, 0), 0)))))))
7489 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
7490 enum machine_mode innermode = TYPE_MODE (op0type);
7491 bool zextend_p = TYPE_UNSIGNED (op0type);
7492 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
7493 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
7495 if (mode == GET_MODE_WIDER_MODE (innermode))
7497 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7499 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7500 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7501 TREE_OPERAND (exp, 1),
7502 NULL_RTX, &op0, &op1, 0);
7504 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7505 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7506 NULL_RTX, &op0, &op1, 0);
7509 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7510 && innermode == word_mode)
7513 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7514 NULL_RTX, VOIDmode, 0);
7515 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7516 op1 = convert_modes (innermode, mode,
7517 expand_expr (TREE_OPERAND (exp, 1),
7518 NULL_RTX, VOIDmode, 0),
7521 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7522 NULL_RTX, VOIDmode, 0);
7523 temp = expand_binop (mode, other_optab, op0, op1, target,
7524 unsignedp, OPTAB_LIB_WIDEN);
7525 hipart = gen_highpart (innermode, temp);
7526 htem = expand_mult_highpart_adjust (innermode, hipart,
7530 emit_move_insn (hipart, htem);
7531 return REDUCE_BIT_FIELD (temp);
7535 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7536 subtarget, &op0, &op1, 0);
7537 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
7539 case TRUNC_DIV_EXPR:
7540 case FLOOR_DIV_EXPR:
7542 case ROUND_DIV_EXPR:
7543 case EXACT_DIV_EXPR:
7544 if (modifier == EXPAND_STACK_PARM)
      /* Possible optimization: compute the dividend with EXPAND_SUM;
	 then, if the divisor is constant, we can optimize the case
	 where some terms of the dividend have coefficients divisible
	 by it.  */
7549 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7550 subtarget, &op0, &op1, 0);
7551 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
      /* Emit a/b as a*(1/b).  Later we may manage to CSE the reciprocal,
	 saving an expensive divide.  If not, combine will rebuild the
	 original computation.  */
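      /* E.g. "a / d + b / d + c / d" may then become three multiplies
	 by one shared "1.0 / d".  */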
7557 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7558 && TREE_CODE (type) == REAL_TYPE
7559 && !real_onep (TREE_OPERAND (exp, 0)))
7560 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7561 build (RDIV_EXPR, type,
7562 build_real (type, dconst1),
7563 TREE_OPERAND (exp, 1))),
7564 target, tmode, modifier);
7565 this_optab = sdiv_optab;
7568 case TRUNC_MOD_EXPR:
7569 case FLOOR_MOD_EXPR:
7571 case ROUND_MOD_EXPR:
7572 if (modifier == EXPAND_STACK_PARM)
7574 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7575 subtarget, &op0, &op1, 0);
7576 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7578 case FIX_ROUND_EXPR:
7579 case FIX_FLOOR_EXPR:
7581 abort (); /* Not used for C. */
7583 case FIX_TRUNC_EXPR:
7584 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7585 if (target == 0 || modifier == EXPAND_STACK_PARM)
7586 target = gen_reg_rtx (mode);
7587 expand_fix (target, op0, unsignedp);
7591 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7592 if (target == 0 || modifier == EXPAND_STACK_PARM)
7593 target = gen_reg_rtx (mode);
7594 /* expand_float can't figure out what to do if FROM has VOIDmode.
7595 So give it the correct mode. With -O, cse will optimize this. */
7596 if (GET_MODE (op0) == VOIDmode)
7597 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7599 expand_float (target, op0,
7600 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7604 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7605 if (modifier == EXPAND_STACK_PARM)
7607 temp = expand_unop (mode,
7608 ! unsignedp && flag_trapv
7609 && (GET_MODE_CLASS(mode) == MODE_INT)
7610 ? negv_optab : neg_optab, op0, target, 0);
7613 return REDUCE_BIT_FIELD (temp);
7616 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7617 if (modifier == EXPAND_STACK_PARM)
7620 /* ABS_EXPR is not valid for complex arguments. */
7621 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7622 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7625 /* Unsigned abs is simply the operand. Testing here means we don't
7626 risk generating incorrect code below. */
7627 if (TYPE_UNSIGNED (type))
7630 return expand_abs (mode, op0, target, unsignedp,
7631 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7635 target = original_target;
7637 || modifier == EXPAND_STACK_PARM
7638 || (MEM_P (target) && MEM_VOLATILE_P (target))
7639 || GET_MODE (target) != mode
7641 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7642 target = gen_reg_rtx (mode);
7643 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7644 target, &op0, &op1, 0);
      /* First try to do it with a special MIN or MAX instruction.
	 If that does not win, use a conditional jump to select the proper
	 value.  */
7649 this_optab = (unsignedp
7650 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7651 : (code == MIN_EXPR ? smin_optab : smax_optab));
7653 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
      /* At this point, a MEM target is no longer useful; we will get better
	 code without it.  */
7662 target = gen_reg_rtx (mode);
7664 /* If op1 was placed in target, swap op0 and op1. */
7665 if (target != op0 && target == op1)
7673 emit_move_insn (target, op0);
7675 op0 = gen_label_rtx ();
7677 /* If this mode is an integer too wide to compare properly,
7678 compare word by word. Rely on cse to optimize constant cases. */
7679 if (GET_MODE_CLASS (mode) == MODE_INT
7680 && ! can_compare_p (GE, mode, ccp_jump))
7682 if (code == MAX_EXPR)
7683 do_jump_by_parts_greater_rtx (mode, unsignedp, target, op1,
7686 do_jump_by_parts_greater_rtx (mode, unsignedp, op1, target,
7691 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7692 unsignedp, mode, NULL_RTX, NULL_RTX, op0);
7694 emit_move_insn (target, op1);
7699 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7700 if (modifier == EXPAND_STACK_PARM)
7702 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
      /* ??? Can optimize bitwise operations with one arg constant.
	 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
	 and (a bitwise1 b) bitwise2 b (etc)
	 but that is probably not worthwhile.  */
7712 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7713 boolean values when we want in all cases to compute both of them. In
7714 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7715 as actual zero-or-1 values and then bitwise anding. In cases where
7716 there cannot be any side effects, better code would be made by
7717 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7718 how to recognize those cases. */
7720 case TRUTH_AND_EXPR:
7722 this_optab = and_optab;
7727 this_optab = ior_optab;
7730 case TRUTH_XOR_EXPR:
7732 this_optab = xor_optab;
7739 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7741 if (modifier == EXPAND_STACK_PARM)
7743 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7744 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7747 /* Could determine the answer when only additive constants differ. Also,
7748 the addition of one can be handled by changing the condition. */
7755 case UNORDERED_EXPR:
7763 temp = do_store_flag (exp,
7764 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
7765 tmode != VOIDmode ? tmode : mode, 0);
7769 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7770 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7772 && REG_P (original_target)
7773 && (GET_MODE (original_target)
7774 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7776 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7779 /* If temp is constant, we can just compute the result. */
7780 if (GET_CODE (temp) == CONST_INT)
7782 if (INTVAL (temp) != 0)
7783 emit_move_insn (target, const1_rtx);
7785 emit_move_insn (target, const0_rtx);
7790 if (temp != original_target)
7792 enum machine_mode mode1 = GET_MODE (temp);
7793 if (mode1 == VOIDmode)
7794 mode1 = tmode != VOIDmode ? tmode : mode;
7796 temp = copy_to_mode_reg (mode1, temp);
7799 op1 = gen_label_rtx ();
7800 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7801 GET_MODE (temp), unsignedp, op1);
7802 emit_move_insn (temp, const1_rtx);
7807 /* If no set-flag instruction, must generate a conditional
7808 store into a temporary variable. Drop through
7809 and handle this like && and ||. */
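      /* That is, compute "a && b" by clearing TARGET, jumping past the
	 store when the condition is false, and then storing 1.  */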
7811 case TRUTH_ANDIF_EXPR:
7812 case TRUTH_ORIF_EXPR:
7815 || modifier == EXPAND_STACK_PARM
7816 || ! safe_from_p (target, exp, 1)
7817 /* Make sure we don't have a hard reg (such as function's return
7818 value) live across basic blocks, if not optimizing. */
7819 || (!optimize && REG_P (target)
7820 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7821 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7824 emit_clr_insn (target);
7826 op1 = gen_label_rtx ();
7827 jumpifnot (exp, op1);
7830 emit_0_to_1_insn (target);
7833 return ignore ? const0_rtx : target;
7835 case TRUTH_NOT_EXPR:
7836 if (modifier == EXPAND_STACK_PARM)
7838 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7839 /* The parser is careful to generate TRUTH_NOT_EXPR
7840 only with operands that are always zero or one. */
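      /* With a 0-or-1 operand, "!x" is therefore just "x ^ 1".  */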
7841 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7842 target, 1, OPTAB_LIB_WIDEN);
7848 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7849 return expand_expr_real (TREE_OPERAND (exp, 1),
7850 (ignore ? const0_rtx : target),
7851 VOIDmode, modifier, alt_rtl);
7853 case STATEMENT_LIST:
7855 tree_stmt_iterator iter;
7860 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
7861 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
7866 /* If it's void, we don't need to worry about computing a value. */
7867 if (VOID_TYPE_P (TREE_TYPE (exp)))
7869 tree pred = TREE_OPERAND (exp, 0);
7870 tree then_ = TREE_OPERAND (exp, 1);
7871 tree else_ = TREE_OPERAND (exp, 2);
7873 if (TREE_CODE (then_) == GOTO_EXPR
7874 && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
7876 jumpif (pred, label_rtx (GOTO_DESTINATION (then_)));
7877 return expand_expr (else_, const0_rtx, VOIDmode, 0);
7879 else if (TREE_CODE (else_) == GOTO_EXPR
7880 && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
7882 jumpifnot (pred, label_rtx (GOTO_DESTINATION (else_)));
7883 return expand_expr (then_, const0_rtx, VOIDmode, 0);
7886 /* Just use the 'if' machinery. */
7887 expand_start_cond (pred, 0);
7888 expand_expr (then_, const0_rtx, VOIDmode, 0);
7892 /* Iterate over 'else if's instead of recursing. */
7893 for (; TREE_CODE (exp) == COND_EXPR; exp = TREE_OPERAND (exp, 2))
7895 expand_start_else ();
7896 if (EXPR_HAS_LOCATION (exp))
7898 emit_line_note (EXPR_LOCATION (exp));
7899 record_block_change (TREE_BLOCK (exp));
7901 expand_elseif (TREE_OPERAND (exp, 0));
7902 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, 0);
7904 /* Don't emit the jump and label if there's no 'else' clause. */
7905 if (TREE_SIDE_EFFECTS (exp))
7907 expand_start_else ();
7908 expand_expr (exp, const0_rtx, VOIDmode, 0);
7914 /* If we would have a "singleton" (see below) were it not for a
7915 conversion in each arm, bring that conversion back out. */
7916 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7917 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
7918 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
7919 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
7921 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
7922 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
7924 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
7925 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
7926 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
7927 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
7928 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
7929 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
7930 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
7931 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
7932 return expand_expr (build1 (NOP_EXPR, type,
7933 build (COND_EXPR, TREE_TYPE (iftrue),
7934 TREE_OPERAND (exp, 0),
7936 target, tmode, modifier);
7940 /* Note that COND_EXPRs whose type is a structure or union
7941 are required to be constructed to contain assignments of
7942 a temporary variable, so that we can evaluate them here
7943 for side effect only. If type is void, we must do likewise. */
7945 /* If an arm of the branch requires a cleanup,
7946 only that cleanup is performed. */
7949 tree binary_op = 0, unary_op = 0;
7951 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
7952 convert it to our mode, if necessary. */
7953 if (integer_onep (TREE_OPERAND (exp, 1))
7954 && integer_zerop (TREE_OPERAND (exp, 2))
7955 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7959 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
7964 if (modifier == EXPAND_STACK_PARM)
7966 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
7967 if (GET_MODE (op0) == mode)
7971 target = gen_reg_rtx (mode);
7972 convert_move (target, op0, unsignedp);
7976 /* Check for X ? A + B : A. If we have this, we can copy A to the
7977 output and conditionally add B. Similarly for unary operations.
7978 Don't do this if X has side-effects because those side effects
7979 might affect A or B and the "?" operation is a sequence point in
7980 ANSI. (operand_equal_p tests for side effects.) */
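	/* E.g. for "c ? a + b : a" we may store A into the target
	   unconditionally and add B in only when C is true.  */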
7982 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
7983 && operand_equal_p (TREE_OPERAND (exp, 2),
7984 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7985 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
7986 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
7987 && operand_equal_p (TREE_OPERAND (exp, 1),
7988 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7989 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
7990 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
7991 && operand_equal_p (TREE_OPERAND (exp, 2),
7992 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7993 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
7994 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
7995 && operand_equal_p (TREE_OPERAND (exp, 1),
7996 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7997 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
	  /* If we are not to produce a result, we have no target.  Otherwise,
	     if a target was specified use it; it will not be used as an
	     intermediate target unless it is safe.  If no target, use a
	     temporary.  */
8006 else if (modifier == EXPAND_STACK_PARM)
8007 temp = assign_temp (type, 0, 0, 1);
8008 else if (original_target
8009 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8010 || (singleton && REG_P (original_target)
8011 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8012 && original_target == var_rtx (singleton)))
8013 && GET_MODE (original_target) == mode
8014 #ifdef HAVE_conditional_move
8015 && (! can_conditionally_move_p (mode)
8016 || REG_P (original_target)
8017 || TREE_ADDRESSABLE (type))
8019 && (!MEM_P (original_target)
8020 || TREE_ADDRESSABLE (type)))
8021 temp = original_target;
8022 else if (TREE_ADDRESSABLE (type))
8025 temp = assign_temp (type, 0, 0, 1);
8027 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8028 do the test of X as a store-flag operation, do this as
8029 A + ((X != 0) << log C). Similarly for other simple binary
8030 operators. Only do for C == 1 if BRANCH_COST is low. */
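	  /* E.g. "x ? y + 4 : y" can become "y + ((x != 0) << 2)",
	     trading a branch for a store-flag insn and a shift.  */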
8031 if (temp && singleton && binary_op
8032 && (TREE_CODE (binary_op) == PLUS_EXPR
8033 || TREE_CODE (binary_op) == MINUS_EXPR
8034 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8035 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8036 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8037 : integer_onep (TREE_OPERAND (binary_op, 1)))
8038 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8042 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8043 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8044 ? addv_optab : add_optab)
8045 : TREE_CODE (binary_op) == MINUS_EXPR
8046 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8047 ? subv_optab : sub_optab)
8048 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8051 /* If we had X ? A : A + 1, do this as A + (X == 0). */
8052 if (singleton == TREE_OPERAND (exp, 1))
8053 cond = invert_truthvalue (TREE_OPERAND (exp, 0));
8055 cond = TREE_OPERAND (exp, 0);
8057 result = do_store_flag (cond, (safe_from_p (temp, singleton, 1)
8059 mode, BRANCH_COST <= 1);
8061 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8062 result = expand_shift (LSHIFT_EXPR, mode, result,
8063 build_int_2 (tree_log2
8067 (safe_from_p (temp, singleton, 1)
8068 ? temp : NULL_RTX), 0);
8072 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8073 return expand_binop (mode, boptab, op1, result, temp,
8074 unsignedp, OPTAB_LIB_WIDEN);
8078 do_pending_stack_adjust ();
8080 op0 = gen_label_rtx ();
	if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
	  {
	    if (temp != 0)
	      {
		/* If the target conflicts with the other operand of the
		   binary op, we can't use it.  Also, we can't use the target
		   if it is a hard register, because evaluating the condition
		   might clobber it.  */
		if ((binary_op
		     && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
		    || (REG_P (temp)
			&& REGNO (temp) < FIRST_PSEUDO_REGISTER))
		  temp = gen_reg_rtx (mode);
		store_expr (singleton, temp,
			    modifier == EXPAND_STACK_PARM ? 2 : 0);
	      }
	    else
	      expand_expr (singleton,
			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8101 if (singleton == TREE_OPERAND (exp, 1))
8102 jumpif (TREE_OPERAND (exp, 0), op0);
	    else
	      jumpifnot (TREE_OPERAND (exp, 0), op0);
8106 if (binary_op && temp == 0)
8107 /* Just touch the other operand. */
8108 expand_expr (TREE_OPERAND (binary_op, 1),
8109 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	    else if (binary_op)
	      store_expr (build (TREE_CODE (binary_op), type,
8112 make_tree (type, temp),
8113 TREE_OPERAND (binary_op, 1)),
8114 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
	    else
	      store_expr (build1 (TREE_CODE (unary_op), type,
8117 make_tree (type, temp)),
8118 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
	    op1 = op0;
	  }
8121 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8122 comparison operator. If we have one of these cases, set the
8123 output to A, branch on A (cse will merge these two references),
8124 then set the output to FOO. */
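	/* Editorial example (not from the original source): for

	       i > 0 ? i : -i

	   the output is first set to I, the branch tests I > 0 (so CSE
	   can merge the two references to I), and only the false arm
	   stores -I into the output.  */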
	else if (temp
		 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8127 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8128 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8129 TREE_OPERAND (exp, 1), 0)
8130 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8131 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8132 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
	  {
	    if (REG_P (temp)
		&& REGNO (temp) < FIRST_PSEUDO_REGISTER)
8136 temp = gen_reg_rtx (mode);
8137 store_expr (TREE_OPERAND (exp, 1), temp,
8138 modifier == EXPAND_STACK_PARM ? 2 : 0);
8139 jumpif (TREE_OPERAND (exp, 0), op0);
8141 if (TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8142 store_expr (TREE_OPERAND (exp, 2), temp,
8143 modifier == EXPAND_STACK_PARM ? 2 : 0);
	    else
	      expand_expr (TREE_OPERAND (exp, 2),
8146 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	  }
	else if (temp
		 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8151 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8152 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8153 TREE_OPERAND (exp, 2), 0)
8154 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8155 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8156 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
	  {
	    if (REG_P (temp)
		&& REGNO (temp) < FIRST_PSEUDO_REGISTER)
8160 temp = gen_reg_rtx (mode);
8161 store_expr (TREE_OPERAND (exp, 2), temp,
8162 modifier == EXPAND_STACK_PARM ? 2 : 0);
8163 jumpifnot (TREE_OPERAND (exp, 0), op0);
8165 if (TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8166 store_expr (TREE_OPERAND (exp, 1), temp,
8167 modifier == EXPAND_STACK_PARM ? 2 : 0);
	    else
	      expand_expr (TREE_OPERAND (exp, 1),
8170 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	  }
	else
	  {
	    op1 = gen_label_rtx ();
8176 jumpifnot (TREE_OPERAND (exp, 0), op0);
8178 /* One branch of the cond can be void, if it never returns. For
8179 example A ? throw : E */
	    if (temp != 0
		&& TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8182 store_expr (TREE_OPERAND (exp, 1), temp,
8183 modifier == EXPAND_STACK_PARM ? 2 : 0);
	    else
	      expand_expr (TREE_OPERAND (exp, 1),
8186 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8187 emit_jump_insn (gen_jump (op1));
	    emit_barrier ();
	    emit_label (op0);
	    if (temp != 0
		&& TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8192 store_expr (TREE_OPERAND (exp, 2), temp,
8193 modifier == EXPAND_STACK_PARM ? 2 : 0);
	    else
	      expand_expr (TREE_OPERAND (exp, 2),
8196 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	  }

	emit_label (op1);
	OK_DEFER_POP;

	return temp;
      }

    case INIT_EXPR:
      {
8207 tree lhs = TREE_OPERAND (exp, 0);
8208 tree rhs = TREE_OPERAND (exp, 1);
8210 temp = expand_assignment (lhs, rhs, ! ignore);
	return temp;
      }

    case MODIFY_EXPR:
      {
8216 /* If lhs is complex, expand calls in rhs before computing it.
8217 That's so we don't compute a pointer and save it over a
8218 call. If lhs is simple, compute it first so we can give it
8219 as a target if the rhs is just a call. This avoids an
8220 extra temp and copy and that prevents a partial-subsumption
8221 which makes bad code. Actually we could treat
8222 component_ref's of vars like vars. */
8224 tree lhs = TREE_OPERAND (exp, 0);
8225 tree rhs = TREE_OPERAND (exp, 1);
	/* Check for |= or &= of a bitfield of size one into another bitfield
	   of size one.  In this case, (unless we need the result of the
	   assignment) we can do this more efficiently with a
	   test followed by an assignment, if necessary.

	   ??? At this point, we can't get a BIT_FIELD_REF here.  But if
	   things change so we do, this code should be enhanced to
	   catch it.  */
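	/* Editorial example (not from the original source): for

	       struct { unsigned a : 1, b : 1; } s;
	       s.a |= s.b;

	   when the result is ignored, this is expanded roughly as

	       if (!s.b) goto done;  s.a = 1;  done:;

	   avoiding a read-modify-write of the destination bitfield.  */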
	if (ignore
	    && TREE_CODE (lhs) == COMPONENT_REF
8239 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8240 || TREE_CODE (rhs) == BIT_AND_EXPR)
8241 && TREE_OPERAND (rhs, 0) == lhs
8242 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8243 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8244 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
	  {
	    rtx label = gen_label_rtx ();

8248 do_jump (TREE_OPERAND (rhs, 1),
8249 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8250 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
	    expand_assignment (lhs, convert (TREE_TYPE (rhs),
					     (TREE_CODE (rhs) == BIT_IOR_EXPR
					      ? integer_one_node
					      : integer_zero_node)),
			       0);
8256 do_pending_stack_adjust ();
	    emit_label (label);
	    return const0_rtx;
	  }

8261 temp = expand_assignment (lhs, rhs, ! ignore);
	return temp;
      }

    case RETURN_EXPR:
8267 if (!TREE_OPERAND (exp, 0))
8268 expand_null_return ();
      else
	expand_return (TREE_OPERAND (exp, 0));
      return const0_rtx;

    case ADDR_EXPR:
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      /* If we are taking the address of something erroneous, just
	 use zero.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
	return const0_rtx;
8280 /* If we are taking the address of a constant and are at the
8281 top level, we have to use output_constant_def since we can't
8282 call force_const_mem at top level. */
      if (cfun == 0
	  && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
	      || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
		  == 'c')))
	op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
      else
	{
8290 /* We make sure to pass const0_rtx down if we came in with
8291 ignore set, to avoid doing the cleanups twice for something. */
8292 op0 = expand_expr (TREE_OPERAND (exp, 0),
8293 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8294 (modifier == EXPAND_INITIALIZER
8295 ? modifier : EXPAND_CONST_ADDRESS));
8297 /* If we are going to ignore the result, OP0 will have been set
8298 to const0_rtx, so just return it. Don't get confused and
8299 think we are taking the address of the constant. */
	  if (ignore)
	    return op0;

8303 /* We would like the object in memory. If it is a constant, we can
8304 have it be statically allocated into memory. For a non-constant,
8305 we need to allocate some memory and store the value into it. */
8307 if (CONSTANT_P (op0))
	    op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
				   op0);
8310 else if (REG_P (op0) || GET_CODE (op0) == SUBREG
8311 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == PARALLEL
8312 || GET_CODE (op0) == LO_SUM)
8314 /* If this object is in a register, it can't be BLKmode. */
8315 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8316 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8318 if (GET_CODE (op0) == PARALLEL)
8319 /* Handle calls that pass values in multiple
		   non-contiguous locations.  The Irix 6 ABI has examples
		   of this.  */
8322 emit_group_store (memloc, op0, inner_type,
8323 int_size_in_bytes (inner_type));
	      else
		emit_move_insn (memloc, op0);
	      op0 = memloc;
	    }

	  if (!MEM_P (op0))
	    abort ();

8333 mark_temp_addr_taken (op0);
	  if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	    {
	      op0 = XEXP (op0, 0);
	      if (GET_MODE (op0) == Pmode && mode == ptr_mode)
		op0 = convert_memory_address (ptr_mode, op0);
	      return op0;
	    }
	  /* If OP0 is not aligned at least as much as the type requires, we
8343 need to make a temporary, copy OP0 to it, and take the address of
8344 the temporary. We want to use the alignment of the type, not of
8345 the operand. Note that this is incorrect for FUNCTION_TYPE, but
8346 the test for BLKmode means that can't happen. The test for
	     BLKmode is because we never make mis-aligned MEMs with
	     non-BLKmode modes.

8350 We don't need to do this at all if the machine doesn't have
8351 strict alignment. */
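	  /* Editorial example (not from the original source): on a
	     strict-alignment target, given something like

		 struct s { int i; } __attribute__ ((packed));
		 int *f (struct s *p) { return &p->i; }

	     the member may sit at an odd byte boundary, so its address
	     cannot be handed out directly; a suitably aligned temporary
	     is allocated and the object copied into it instead (provided
	     the type is not TREE_ADDRESSABLE).  */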
8352 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
8353 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
		  > MEM_ALIGN (op0))
8355 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
	    {
	      tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
	      rtx new;

	      if (TYPE_ALIGN_OK (inner_type))
		abort ();

	      if (TREE_ADDRESSABLE (inner_type))
		{
		  /* We can't make a bitwise copy of this object, so fail.  */
		  error ("cannot take the address of an unaligned member");
		  return const0_rtx;
		}

8370 new = assign_stack_temp_for_type
8371 (TYPE_MODE (inner_type),
8372 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
8373 : int_size_in_bytes (inner_type),
8374 1, build_qualified_type (inner_type,
8375 (TYPE_QUALS (inner_type)
8376 | TYPE_QUAL_CONST)));
8378 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
8379 (modifier == EXPAND_STACK_PARM
8380 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
	      op0 = new;
	    }

8385 op0 = force_operand (XEXP (op0, 0), target);
	}

      if (flag_force_addr
	  && !REG_P (op0)
8390 && modifier != EXPAND_CONST_ADDRESS
8391 && modifier != EXPAND_INITIALIZER
8392 && modifier != EXPAND_SUM)
8393 op0 = force_reg (Pmode, op0);

      if (REG_P (op0)
	  && ! REG_USERVAR_P (op0))
8397 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8399 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
8400 op0 = convert_memory_address (ptr_mode, op0);

      return op0;
    case ENTRY_VALUE_EXPR:
      abort ();

    /* COMPLEX type for Extended Pascal & Fortran  */
    case COMPLEX_EXPR:
      {
8410 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8413 /* Get the rtx code of the operands. */
8414 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8415 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
	if (! target)
	  target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

	start_sequence ();

8422 /* Move the real (op0) and imaginary (op1) parts to their location. */
8423 emit_move_insn (gen_realpart (mode, target), op0);
8424 emit_move_insn (gen_imagpart (mode, target), op1);
8426 insns = get_insns ();
	end_sequence ();

8429 /* Complex construction should appear as a single unit. */
8430 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8431 each with a separate pseudo as destination.
8432 It's not correct for flow to treat them as a unit. */
8433 if (GET_CODE (target) != CONCAT)
8434 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
	else
	  emit_insn (insns);

	return target;
      }

    case REALPART_EXPR:
8442 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8443 return gen_realpart (mode, op0);

    case IMAGPART_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8447 return gen_imagpart (mode, op0);

    case CONJ_EXPR:
      {
	enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
	rtx imag_t;
	rtx insns;

8455 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
	if (! target)
	  target = gen_reg_rtx (mode);

	start_sequence ();

8462 /* Store the realpart and the negated imagpart to target. */
8463 emit_move_insn (gen_realpart (partmode, target),
8464 gen_realpart (partmode, op0));
8466 imag_t = gen_imagpart (partmode, target);
8467 temp = expand_unop (partmode,
8468 ! unsignedp && flag_trapv
8469 && (GET_MODE_CLASS(partmode) == MODE_INT)
8470 ? negv_optab : neg_optab,
8471 gen_imagpart (partmode, op0), imag_t, 0);
8473 emit_move_insn (imag_t, temp);
8475 insns = get_insns ();
	end_sequence ();

	/* Conjugate should appear as a single unit.
8479 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8480 each with a separate pseudo as destination.
8481 It's not correct for flow to treat them as a unit. */
8482 if (GET_CODE (target) != CONCAT)
8483 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
	else
	  emit_insn (insns);

	return target;
      }

    case RESX_EXPR:
8491 expand_resx_expr (exp);
      return const0_rtx;

    case TRY_CATCH_EXPR:
    case CATCH_EXPR:
8496 case EH_FILTER_EXPR:
8497 case TRY_FINALLY_EXPR:
      /* Lowered by tree-eh.c.  */
      abort ();

8501 case WITH_CLEANUP_EXPR:
    case CLEANUP_POINT_EXPR:
    case TARGET_EXPR:
      /* Lowered by gimplify.c.  */
      abort ();

    case VA_ARG_EXPR:
8508 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
    case EXC_PTR_EXPR:
      return get_exception_pointer (cfun);
    case FILTER_EXPR:
      return get_exception_filter (cfun);
8516 case PREINCREMENT_EXPR:
8517 case PREDECREMENT_EXPR:
8518 case POSTINCREMENT_EXPR:
8519 case POSTDECREMENT_EXPR:
      abort ();

    case FDESC_EXPR:
      /* Function descriptors are not valid except as
	 initialization constants, and should not be expanded.  */
      abort ();

    case SWITCH_EXPR:
      expand_start_case (0, SWITCH_COND (exp), integer_type_node,
			 "switch statement");
8528 if (SWITCH_BODY (exp))
8529 expand_expr_stmt (SWITCH_BODY (exp));
8530 if (SWITCH_LABELS (exp))
	{
	  tree duplicate = 0;
	  tree vec = SWITCH_LABELS (exp);
8534 size_t i, n = TREE_VEC_LENGTH (vec);
	  for (i = 0; i < n; ++i)
	    {
8538 tree elt = TREE_VEC_ELT (vec, i);
8539 tree controlling_expr_type = TREE_TYPE (SWITCH_COND (exp));
8540 tree min_value = TYPE_MIN_VALUE (controlling_expr_type);
8541 tree max_value = TYPE_MAX_VALUE (controlling_expr_type);
8543 tree case_low = CASE_LOW (elt);
8544 tree case_high = CASE_HIGH (elt) ? CASE_HIGH (elt) : case_low;
	      if (case_low && case_high)
		{
8547 /* Case label is less than minimum for type. */
8548 if (TREE_CODE (min_value) == INTEGER_CST
8549 && tree_int_cst_compare (case_low, min_value) < 0
		      && tree_int_cst_compare (case_high, min_value) < 0)
		    {
		      warning ("case label value %wd is less than minimum value for type",
			       TREE_INT_CST_LOW (case_low));
		      continue;
		    }

8557 /* Case value is greater than maximum for type. */
8558 if (TREE_CODE (max_value) == INTEGER_CST
8559 && tree_int_cst_compare (case_low, max_value) > 0
		      && tree_int_cst_compare (case_high, max_value) > 0)
		    {
		      warning ("case label value %wd exceeds maximum value for type",
			       TREE_INT_CST_LOW (case_high));
		      continue;
		    }

8567 /* Saturate lower case label value to minimum. */
8568 if (TREE_CODE (min_value) == INTEGER_CST
8569 && tree_int_cst_compare (case_high, min_value) >= 0
		      && tree_int_cst_compare (case_low, min_value) < 0)
		    {
		      warning ("lower value %wd in case label range less than minimum value for type",
			       TREE_INT_CST_LOW (case_low));
		      case_low = min_value;
		    }

8577 /* Saturate upper case label value to maximum. */
8578 if (TREE_CODE (max_value) == INTEGER_CST
		      && tree_int_cst_compare (case_low, max_value) <= 0
		      && tree_int_cst_compare (case_high, max_value) > 0)
		    {
		      warning ("upper value %wd in case label range exceeds maximum value for type",
			       TREE_INT_CST_LOW (case_high));
		      case_high = max_value;
		    }
		}

	      add_case_node (case_low, case_high, CASE_LABEL (elt),
			     &duplicate, true);
	    }
	}

      expand_end_case_type (SWITCH_COND (exp), TREE_TYPE (exp));
      return const0_rtx;

    case LABEL_EXPR:
      expand_label (TREE_OPERAND (exp, 0));
      return const0_rtx;
8600 case CASE_LABEL_EXPR:
      {
	tree duplicate = 0;
	add_case_node (CASE_LOW (exp), CASE_HIGH (exp), CASE_LABEL (exp),
		       &duplicate, false);
	if (duplicate)
	  abort ();
	return const0_rtx;
      }

    case ASM_EXPR:
      expand_asm_expr (exp);
      return const0_rtx;

    default:
      return lang_hooks.expand_expr (exp, original_target, tmode,
				     modifier, alt_rtl);
    }
8619 /* Here to do an ordinary binary operator, generating an instruction
8620 from the optab already placed in `this_optab'. */
 binop:
  expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		   subtarget, &op0, &op1, 0);
 binop2:
  if (modifier == EXPAND_STACK_PARM)
    target = 0;
8627 temp = expand_binop (mode, this_optab, op0, op1, target,
8628 unsignedp, OPTAB_LIB_WIDEN);
  if (temp == 0)
    abort ();
  return REDUCE_BIT_FIELD (temp);
}
8633 #undef REDUCE_BIT_FIELD
8635 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
8636 signedness of TYPE), possibly returning the result in TARGET. */
static rtx
reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
{
8640 HOST_WIDE_INT prec = TYPE_PRECISION (type);
  if (target && GET_MODE (target) != GET_MODE (exp))
    target = 0;
  if (TYPE_UNSIGNED (type))
    {
      rtx mask;
8646 if (prec < HOST_BITS_PER_WIDE_INT)
	mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
				   GET_MODE (exp));
      else
8650 mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
8651 ((unsigned HOST_WIDE_INT) 1
				    << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
				   GET_MODE (exp));
      return expand_and (GET_MODE (exp), exp, mask, target);
    }
  else
    {
8658 tree count = build_int_2 (GET_MODE_BITSIZE (GET_MODE (exp)) - prec, 0);
8659 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
      return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
    }
}
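/* Editorial example (not from the original source): for a 3-bit
   unsigned bit-field type, PREC is 3 and the value is reduced as

       x & 7

   while a 3-bit signed bit-field in a 32-bit mode is sign-extended
   from bit 2 by

       (x << 29) >> 29

   using an arithmetic right shift.  */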
8664 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
8665 when applied to the address of EXP produces an address known to be
8666 aligned more than BIGGEST_ALIGNMENT. */
static int
is_aligning_offset (tree offset, tree exp)
{
8671 /* Strip off any conversions. */
8672 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8673 || TREE_CODE (offset) == NOP_EXPR
8674 || TREE_CODE (offset) == CONVERT_EXPR)
8675 offset = TREE_OPERAND (offset, 0);
  /* We must now have a BIT_AND_EXPR with a constant that is one less than
     a power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
8679 if (TREE_CODE (offset) != BIT_AND_EXPR
8680 || !host_integerp (TREE_OPERAND (offset, 1), 1)
8681 || compare_tree_int (TREE_OPERAND (offset, 1),
8682 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
      || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
    return 0;
8686 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
8687 It must be NEGATE_EXPR. Then strip any more conversions. */
8688 offset = TREE_OPERAND (offset, 0);
8689 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8690 || TREE_CODE (offset) == NOP_EXPR
8691 || TREE_CODE (offset) == CONVERT_EXPR)
8692 offset = TREE_OPERAND (offset, 0);
  if (TREE_CODE (offset) != NEGATE_EXPR)
    return 0;
8697 offset = TREE_OPERAND (offset, 0);
8698 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8699 || TREE_CODE (offset) == NOP_EXPR
8700 || TREE_CODE (offset) == CONVERT_EXPR)
8701 offset = TREE_OPERAND (offset, 0);
8703 /* This must now be the address of EXP. */
  return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
}
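/* Editorial example (not from the original source): an aligning offset
   as recognized above typically arises from alignment rounding such as

       ((unsigned long) -(long) &exp) & (ALIGN - 1)

   i.e. a BIT_AND_EXPR of a NEGATE_EXPR of the object's address with
   the mask ALIGN - 1, which yields the distance from &exp up to the
   next ALIGN boundary.  */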
/* Return the tree node if ARG corresponds to a string constant, or zero
   if it doesn't.  If we return nonzero, set *PTR_OFFSET to the offset
   in bytes within the string that ARG is accessing.  The type of the
   offset will be `sizetype'.  */
tree
string_constant (tree arg, tree *ptr_offset)
{
  STRIP_NOPS (arg);

8717 if (TREE_CODE (arg) == ADDR_EXPR
8718 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
    {
      *ptr_offset = size_zero_node;
      return TREE_OPERAND (arg, 0);
    }
8723 if (TREE_CODE (arg) == ADDR_EXPR
8724 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
8725 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg, 0), 0)) == STRING_CST)
    {
      *ptr_offset = convert (sizetype, TREE_OPERAND (TREE_OPERAND (arg, 0), 1));
      return TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
    }
8730 else if (TREE_CODE (arg) == PLUS_EXPR)
    {
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);
8738 if (TREE_CODE (arg0) == ADDR_EXPR
8739 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
	{
	  *ptr_offset = convert (sizetype, arg1);
	  return TREE_OPERAND (arg0, 0);
	}
8744 else if (TREE_CODE (arg1) == ADDR_EXPR
8745 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
	{
	  *ptr_offset = convert (sizetype, arg0);
	  return TREE_OPERAND (arg1, 0);
	}
    }

  return 0;
}
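/* Editorial example (not from the original source): for the argument
   of a call like

       strlen ("hello" + i)

   ARG is the PLUS_EXPR of the address of the STRING_CST "hello" and I;
   the function returns the STRING_CST and sets *PTR_OFFSET to I
   converted to `sizetype'.  */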
8755 /* Generate code to calculate EXP using a store-flag instruction
8756 and return an rtx for the result. EXP is either a comparison
8757 or a TRUTH_NOT_EXPR whose operand is a comparison.
8759 If TARGET is nonzero, store the result there if convenient.
   If ONLY_CHEAP is nonzero, only do this if it is likely to be very
   cheap.

8764 Return zero if there is no suitable set-flag instruction
8765 available on this machine.
8767 Once expand_expr has been called on the arguments of the comparison,
8768 we are committed to doing the store flag, since it is not safe to
8769 re-evaluate the expression. We emit the store-flag insn by calling
8770 emit_store_flag, but only expand the arguments if we have a reason
8771 to believe that emit_store_flag will be successful. If we think that
8772 it will, but it isn't, we have to simulate the store-flag with a
8773 set/jump/set sequence. */
static rtx
do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;
8789 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
8790 result at the end. We can't simply invert the test since it would
8791 have already been inverted if it were valid. This case occurs for
8792 some floating-point comparisons. */
8794 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
8795 invert = 1, exp = TREE_OPERAND (exp, 0);
8797 arg0 = TREE_OPERAND (exp, 0);
8798 arg1 = TREE_OPERAND (exp, 1);
8800 /* Don't crash if the comparison was erroneous. */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;
8804 type = TREE_TYPE (arg0);
8805 operand_mode = TYPE_MODE (type);
8806 unsignedp = TYPE_UNSIGNED (type);
8808 /* We won't bother with BLKmode store-flag operations because it would mean
8809 passing a lot of information to emit_store_flag. */
  if (operand_mode == BLKmode)
    return 0;
8813 /* We won't bother with store-flag operations involving function pointers
8814 when function pointers must be canonicalized before comparisons. */
8815 #ifdef HAVE_canonicalize_funcptr_for_compare
8816 if (HAVE_canonicalize_funcptr_for_compare
8817 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
8818 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8820 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
8821 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
	      == FUNCTION_TYPE))))
    return 0;
#endif
8829 /* Get the rtx comparison code to use. We know that EXP is a comparison
8830 operation of some type. Some comparisons against 1 and -1 can be
8831 converted to comparisons with zero. Do so here so that the tests
8832 below will be aware that we have a comparison with zero. These
8833 tests will not catch constants in the first operand, but constants
8834 are rarely passed as the first operand. */
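  /* Editorial example (not from the original source): for unsigned X,
     the comparison

	 x < 1

     is rewritten below as x <= 0 (LEU against zero), which
     emit_store_flag can often implement as a single x == 0 test.  */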
  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
8845 if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
	code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
8851 if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = LT;
      else
	code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
8857 if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = GE;
      else
	code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
8863 if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
	code = unsignedp ? GEU : GE;
      break;

    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;
    case LTGT_EXPR:
      code = LTGT;
      break;

    default:
      abort ();
    }
8898 /* Put a constant second. */
8899 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }
8905 /* If this is an equality or inequality test of a single bit, we can
8906 do this by shifting the bit being tested to the low-order bit and
8907 masking the result with the constant 1. If the condition was EQ,
8908 we xor it with 1. This does not require an scc insn and is faster
8909 than an scc insn even if we have it.
8911 The code to make this transformation was moved into fold_single_bit_test,
8912 so we just call into the folder and expand its result. */
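  /* Editorial example (not from the original source):

	 (x & 8) != 0

     is folded by fold_single_bit_test into the equivalent of

	 (x >> 3) & 1

     so no store-flag (scc) instruction is needed.  */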
8914 if ((code == NE || code == EQ)
8915 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
8916 && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
      return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
						arg0, arg1, type),
			  target, VOIDmode, EXPAND_NORMAL);
    }
8924 /* Now see if we are likely to be able to do this. Return if not. */
  if (! can_compare_p (code, operand_mode, ccp_store_flag))
    return 0;
8928 icode = setcc_gen_code[(int) code];
8929 if (icode == CODE_FOR_nothing
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
    {
8932 /* We can only do this if it is one of the special cases that
8933 can be handled without an scc insn. */
8934 if ((code == LT && integer_zerop (arg1))
	  || (! only_cheap && code == GE && integer_zerop (arg1)))
	;
8937 else if (BRANCH_COST >= 0
8938 && ! only_cheap && (code == NE || code == EQ)
8939 && TREE_CODE (type) != REAL_TYPE
8940 && ((abs_optab->handlers[(int) operand_mode].insn_code
8941 != CODE_FOR_nothing)
8942 || (ffs_optab->handlers[(int) operand_mode].insn_code
		   != CODE_FOR_nothing)))
	;
      else
	return 0;
    }
8949 if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode)
    subtarget = 0;
8953 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
  if (target == 0)
    target = gen_reg_rtx (mode);
8958 result = emit_store_flag (target, code, op0, op1,
8959 operand_mode, unsignedp, 1);
  if (result)
    {
      if (invert)
	result = expand_binop (mode, xor_optab, result, const1_rtx,
			       result, 0, OPTAB_LIB_WIDEN);
      return result;
    }
8969 /* If this failed, we have to do this with set/compare/jump/set code. */
  if (!REG_P (target)
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
8972 target = gen_reg_rtx (GET_MODE (target));
8974 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
8975 result = compare_from_rtx (op0, op1, code, unsignedp,
8976 operand_mode, NULL_RTX);
8977 if (GET_CODE (result) == CONST_INT)
8978 return (((result == const0_rtx && ! invert)
8979 || (result != const0_rtx && invert))
8980 ? const0_rtx : const1_rtx);
8982 /* The code of RESULT may not match CODE if compare_from_rtx
8983 decided to swap its operands and reverse the original code.
8985 We know that compare_from_rtx returns either a CONST_INT or
8986 a new comparison code, so it is safe to just extract the
8987 code from RESULT. */
8988 code = GET_CODE (result);
8990 label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();
8994 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
9005 # define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif
9009 /* If the machine does not have a case insn that compares the bounds,
9010 this means extra overhead for dispatch tables, which raises the
9011 threshold for using them. */
9012 #ifndef CASE_VALUES_THRESHOLD
9013 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
9014 #endif /* CASE_VALUES_THRESHOLD */
unsigned int
case_values_threshold (void)
{
  return CASE_VALUES_THRESHOLD;
}
9022 /* Attempt to generate a casesi instruction. Returns 1 if successful,
9023 0 otherwise (i.e. if there is no casesi instruction). */
int
try_casesi (tree index_type, tree index_expr, tree minval, tree range,
	    rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
{
9028 enum machine_mode index_mode = SImode;
9029 int index_bits = GET_MODE_BITSIZE (index_mode);
9030 rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;

9036 /* Convert the index to SImode. */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
9039 enum machine_mode omode = TYPE_MODE (index_type);
9040 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
9042 /* We must handle the endpoints in the original mode. */
9043 index_expr = build (MINUS_EXPR, index_type,
9044 index_expr, minval);
9045 minval = integer_zero_node;
9046 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9047 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
9048 omode, 1, default_label);
9049 /* Now we can safely truncate. */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
	{
9056 index_expr = convert (lang_hooks.types.type_for_size
9057 (index_bits, 0), index_expr);
	  index_type = TREE_TYPE (index_expr);
	}

      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
    }

9064 do_pending_stack_adjust ();
9066 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
9069 index = copy_to_mode_reg (op_mode, index);
9071 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
9073 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
9074 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
9075 op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
9078 op1 = copy_to_mode_reg (op_mode, op1);
9080 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
9082 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
9083 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
9084 op2, TYPE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
9087 op2 = copy_to_mode_reg (op_mode, op2);
9089 emit_jump_insn (gen_casesi (index, op1, op2,
			      table_label, default_label));
  return 1;
}
9094 /* Attempt to generate a tablejump instruction; same concept. */
9095 #ifndef HAVE_tablejump
9096 #define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif
9100 /* Subroutine of the next function.
9102 INDEX is the value being switched on, with the lowest value
9103 in the table already subtracted.
9104 MODE is its expected mode (needed if INDEX is constant).
9105 RANGE is the length of the jump table.
9106 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9108 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9109 index value is out of range. */
static void
do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
	      rtx default_label)
{
  rtx temp, vector;

9117 if (INTVAL (range) > cfun->max_jumptable_ents)
9118 cfun->max_jumptable_ents = INTVAL (range);
9120 /* Do an unsigned comparison (in the proper mode) between the index
9121 expression and the value which represents the length of the range.
9122 Since we just finished subtracting the lower bound of the range
9123 from the index expression, this comparison allows us to simultaneously
9124 check that the original index expression value is both greater than
9125 or equal to the minimum value of the range and less than or equal to
9126 the maximum value of the range. */
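  /* Editorial example (not from the original source): for case labels
     5 .. 10, the caller rewrites the index as i - 5 and RANGE is 5;
     then the single unsigned test

	 (unsigned) (i - 5) > 5

     rejects both i < 5 and i > 10, because values below 5 wrap around
     to large unsigned numbers.  */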
  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
			   default_label);
9131 /* If index is in range, it must fit in Pmode.
9132 Convert to Pmode so we can index with it. */
9134 index = convert_to_mode (Pmode, index, 1);
9136 /* Don't let a MEM slip through, because then INDEX that comes
9137 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9138 and break_out_memory_refs will go to work on it and mess it up. */
9139 #ifdef PIC_CASE_VECTOR_ADDRESS
9140 if (flag_pic && !REG_P (index))
    index = copy_to_mode_reg (Pmode, index);
#endif
9144 /* If flag_force_addr were to affect this address
9145 it could interfere with the tricky assumptions made
9146 about addresses that contain label-refs,
9147 which may be valid only very near the tablejump itself. */
9148 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9149 GET_MODE_SIZE, because this indicates how large insns are. The other
9150 uses should all be Pmode, because they are addresses. This code
9151 could fail if addresses and insns are not the same size. */
9152 index = gen_rtx_PLUS (Pmode,
9153 gen_rtx_MULT (Pmode, index,
9154 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9155 gen_rtx_LABEL_REF (Pmode, table_label));
9156 #ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
9161 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9162 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9163 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
9164 RTX_UNCHANGING_P (vector) = 1;
9165 MEM_NOTRAP_P (vector) = 1;
9166 convert_move (temp, vector, 0);
9168 emit_jump_insn (gen_tablejump (temp, table_label));
9170 /* If we are generating PIC code or if the table is PC-relative, the
9171 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}

int
9177 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
	       rtx table_label, rtx default_label)
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

9185 index_expr = fold (build (MINUS_EXPR, index_type,
9186 convert (index_type, index_expr),
9187 convert (index_type, minval)));
9188 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9189 do_pending_stack_adjust ();
9191 do_tablejump (index, TYPE_MODE (index_type),
9192 convert_modes (TYPE_MODE (index_type),
9193 TYPE_MODE (TREE_TYPE (range)),
			       expand_expr (range, NULL_RTX,
					    VOIDmode, 0),
9196 TYPE_UNSIGNED (TREE_TYPE (range))),
		table_label, default_label);
  return 1;
}
9201 /* Nonzero if the mode is a valid vector mode for this architecture.
9202 This returns nonzero even if there is no hardware support for the
9203 vector mode, but we can emulate with narrower modes. */
int
vector_mode_valid_p (enum machine_mode mode)
{
9208 enum mode_class class = GET_MODE_CLASS (mode);
9209 enum machine_mode innermode;
9211 /* Doh! What's going on? */
9212 if (class != MODE_VECTOR_INT
      && class != MODE_VECTOR_FLOAT)
    return 0;
9216 /* Hardware support. Woo hoo! */
  if (VECTOR_MODE_SUPPORTED_P (mode))
    return 1;
9220 innermode = GET_MODE_INNER (mode);
9222 /* We should probably return 1 if requesting V4DI and we have no DI,
9223 but we have V2DI, but this is probably very unlikely. */
9225 /* If we have support for the inner mode, we can safely emulate it.
     We may not have V2DI, but we can emulate with a pair of DIs.  */
  return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
}
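/* Editorial example (not from the original source): if the target has
   DImode moves but no native V2DImode support, a V2DImode value can
   still be moved as two DImode words, so V2DImode is reported as valid
   here.  */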
9230 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
static rtx
const_vector_from_tree (tree exp)
{
  rtvec v;
  int units, i;
  tree link, elt;
9237 enum machine_mode inner, mode;
9239 mode = TYPE_MODE (TREE_TYPE (exp));
9241 if (initializer_zerop (exp))
9242 return CONST0_RTX (mode);
9244 units = GET_MODE_NUNITS (mode);
9245 inner = GET_MODE_INNER (mode);
9247 v = rtvec_alloc (units);
9249 link = TREE_VECTOR_CST_ELTS (exp);
  for (i = 0; link; link = TREE_CHAIN (link), ++i)
    {
9252 elt = TREE_VALUE (link);
9254 if (TREE_CODE (elt) == REAL_CST)
	RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
							 inner);
      else
9258 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
					       TREE_INT_CST_HIGH (elt),
					       inner);
    }
9263 /* Initialize remaining elements to 0. */
9264 for (; i < units; ++i)
9265 RTVEC_ELT (v, i) = CONST0_RTX (inner);
  return gen_rtx_raw_CONST_VECTOR (mode, v);
}

9269 #include "gt-expr.h"