1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
24 #include "coretypes.h"
32 #include "hard-reg-set.h"
35 #include "insn-config.h"
36 #include "insn-attr.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
44 #include "typeclass.h"
47 #include "langhooks.h"
50 #include "tree-iterator.h"
51 #include "tree-pass.h"
52 #include "tree-flow.h"
56 /* Decide whether a function's arguments should be processed
57 from first to last or from last to first.
59 They should if the stack and args grow in opposite directions, but
60 only if we have push insns. */
64 #ifndef PUSH_ARGS_REVERSED
65 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
66 #define PUSH_ARGS_REVERSED /* If it's last to first. */
72 #ifndef STACK_PUSH_CODE
73 #ifdef STACK_GROWS_DOWNWARD
74 #define STACK_PUSH_CODE PRE_DEC
76 #define STACK_PUSH_CODE PRE_INC
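/* Editorial note (not in the original source): with the defaults above,
   a push on a downward-growing stack is emitted as

       (set (mem:M (pre_dec:P (reg sp))) (reg:M n))

   i.e. STACK_PUSH_CODE selects PRE_DEC so the stack pointer moves before
   the store; PRE_INC is the mirror case for upward-growing stacks.  */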
81 /* If this is nonzero, we do not bother generating VOLATILE
82 around volatile memory references, and we are willing to
83 output indirect addresses. If cse is to follow, we reject
84 indirect addresses so a useful potential cse is generated;
85 if it is used only once, instruction combination will produce
86 the same indirect address eventually. */
89 /* This structure is used by move_by_pieces to describe the move to be performed. */
100 int explicit_inc_from;
101 unsigned HOST_WIDE_INT len;
102 HOST_WIDE_INT offset;
106 /* This structure is used by store_by_pieces to describe the clear to be performed. */
109 struct store_by_pieces
115 unsigned HOST_WIDE_INT len;
116 HOST_WIDE_INT offset;
117 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
122 static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
124 static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
125 struct move_by_pieces *);
126 static bool block_move_libcall_safe_for_call_parm (void);
127 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned);
128 static rtx emit_block_move_via_libcall (rtx, rtx, rtx);
129 static tree emit_block_move_libcall_fn (int);
130 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
131 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
132 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
133 static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
134 static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
135 struct store_by_pieces *);
136 static bool clear_storage_via_clrmem (rtx, rtx, unsigned);
137 static rtx clear_storage_via_libcall (rtx, rtx);
138 static tree clear_storage_libcall_fn (int);
139 static rtx compress_float_constant (rtx, rtx);
140 static rtx get_subtarget (rtx);
141 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
142 HOST_WIDE_INT, enum machine_mode,
143 tree, tree, int, int);
144 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
145 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
146 tree, enum machine_mode, int, tree, int);
148 static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
149 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);
151 static int is_aligning_offset (tree, tree);
152 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
153 enum expand_modifier);
154 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
155 static rtx do_store_flag (tree, rtx, enum machine_mode, int);
157 static void emit_single_push_insn (enum machine_mode, rtx, tree);
159 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
160 static rtx const_vector_from_tree (tree);
162 /* Record for each mode whether we can move a register directly to or
163 from an object of that mode in memory. If we can't, we won't try
164 to use that mode directly when accessing a field of that mode. */
166 static char direct_load[NUM_MACHINE_MODES];
167 static char direct_store[NUM_MACHINE_MODES];
169 /* Record for each mode whether we can float-extend from memory. */
171 static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
173 /* This macro is used to determine whether move_by_pieces should be called
174 to perform a structure copy. */
175 #ifndef MOVE_BY_PIECES_P
176 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
177 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
180 /* This macro is used to determine whether clear_by_pieces should be
181 called to clear storage. */
182 #ifndef CLEAR_BY_PIECES_P
183 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
184 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
187 /* This macro is used to determine whether store_by_pieces should be
188 called to "memset" storage with byte values other than zero, or
189 to "memcpy" storage when the source is a constant string. */
190 #ifndef STORE_BY_PIECES_P
191 #define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
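/* Editorial example (hypothetical numbers): on a 32-bit target with
   MOVE_RATIO of 3, an 8-byte copy at 4-byte alignment costs two SImode
   moves, so MOVE_BY_PIECES_P holds and the copy is unrolled inline;
   a 64-byte copy would cost 16 moves and instead fall back to a movmem
   pattern or a libcall.  */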
194 /* This array records the insn_code of insns to perform block moves. */
195 enum insn_code movmem_optab[NUM_MACHINE_MODES];
197 /* This array records the insn_code of insns to perform block clears. */
198 enum insn_code clrmem_optab[NUM_MACHINE_MODES];
200 /* These arrays record the insn_code of two different kinds of insns
201 to perform block compares. */
202 enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
203 enum insn_code cmpmem_optab[NUM_MACHINE_MODES];
205 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
207 #ifndef SLOW_UNALIGNED_ACCESS
208 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
211 /* This is run once per compilation to set up which modes can be used
212 directly in memory and to initialize the block move optab. */
215 init_expr_once (void)
218 enum machine_mode mode;
223 /* Try indexing by frame ptr and try by stack ptr.
224 It is known that on the Convex the stack ptr isn't a valid index.
225 With luck, one or the other is valid on any machine. */
226 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
227 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
229 /* A scratch register we can modify in-place below to avoid
230 useless RTL allocations. */
231 reg = gen_rtx_REG (VOIDmode, -1);
233 insn = rtx_alloc (INSN);
234 pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
235 PATTERN (insn) = pat;
237 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
238 mode = (enum machine_mode) ((int) mode + 1))
242 direct_load[(int) mode] = direct_store[(int) mode] = 0;
243 PUT_MODE (mem, mode);
244 PUT_MODE (mem1, mode);
245 PUT_MODE (reg, mode);
247 /* See if there is some register that can be used in this mode and
248 directly loaded or stored from memory. */
250 if (mode != VOIDmode && mode != BLKmode)
251 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
252 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
255 if (! HARD_REGNO_MODE_OK (regno, mode))
261 SET_DEST (pat) = reg;
262 if (recog (pat, insn, &num_clobbers) >= 0)
263 direct_load[(int) mode] = 1;
265 SET_SRC (pat) = mem1;
266 SET_DEST (pat) = reg;
267 if (recog (pat, insn, &num_clobbers) >= 0)
268 direct_load[(int) mode] = 1;
271 SET_DEST (pat) = mem;
272 if (recog (pat, insn, &num_clobbers) >= 0)
273 direct_store[(int) mode] = 1;
276 SET_DEST (pat) = mem1;
277 if (recog (pat, insn, &num_clobbers) >= 0)
278 direct_store[(int) mode] = 1;
282 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
284 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
285 mode = GET_MODE_WIDER_MODE (mode))
287 enum machine_mode srcmode;
288 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
289 srcmode = GET_MODE_WIDER_MODE (srcmode))
293 ic = can_extend_p (mode, srcmode, 0);
294 if (ic == CODE_FOR_nothing)
297 PUT_MODE (mem, srcmode);
299 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
300 float_extend_from_mem[mode][srcmode] = true;
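/* Editorial note: the probing in init_expr_once works by building a bare
   (set (reg) (mem)) or (set (mem) (reg)) pattern and asking recog, or the
   extend insn's predicate, whether the target accepts it.  For example,
   if a target can load an SFmode register straight from memory,
   direct_load[(int) SFmode] becomes 1 and fields of that mode are
   accessed directly rather than through an integer mode.  */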
305 /* This is run at the start of compiling a function. */
310 cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
313 /* Copy data from FROM to TO, where the machine modes are not the same.
314 Both modes may be integer, or both may be floating.
315 UNSIGNEDP should be nonzero if FROM is an unsigned type.
316 This causes zero-extension instead of sign-extension. */
319 convert_move (rtx to, rtx from, int unsignedp)
321 enum machine_mode to_mode = GET_MODE (to);
322 enum machine_mode from_mode = GET_MODE (from);
323 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
324 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
328 /* rtx code for making an equivalent value. */
329 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
330 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
333 if (to_real != from_real)
336 /* If the source and destination are already the same, then there's nothing to do. */
341 /* If FROM is a SUBREG that indicates that we have already done at least
342 the required extension, strip it. We don't handle such SUBREGs as TO here. */
345 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
346 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
347 >= GET_MODE_SIZE (to_mode))
348 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
349 from = gen_lowpart (to_mode, from), from_mode = to_mode;
351 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
354 if (to_mode == from_mode
355 || (from_mode == VOIDmode && CONSTANT_P (from)))
357 emit_move_insn (to, from);
361 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
363 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
366 if (VECTOR_MODE_P (to_mode))
367 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
369 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
371 emit_move_insn (to, from);
375 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
377 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
378 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
387 if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
389 else if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
394 /* Try converting directly if the insn is supported. */
396 code = tab->handlers[to_mode][from_mode].insn_code;
397 if (code != CODE_FOR_nothing)
399 emit_unop_insn (code, to, from,
400 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
404 /* Otherwise use a libcall. */
405 libcall = tab->handlers[to_mode][from_mode].libfunc;
408 /* This conversion is not implemented yet. */
412 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
414 insns = get_insns ();
416 emit_libcall_block (insns, to, value,
417 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
419 : gen_rtx_FLOAT_EXTEND (to_mode, from));
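/* Editorial example: an SFmode-to-DFmode extension with no extendsfdf2
   insn reaches the libcall path above; the sext_optab libfunc is libgcc's
   __extendsfdf2, and emit_libcall_block records (float_extend:DF ...) as
   the equivalent value so later passes can still simplify the result.  */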
423 /* Handle pointer conversion. */ /* SPEE 900220. */
424 /* Targets are expected to provide conversion insns between PxImode and
425 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
426 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
428 enum machine_mode full_mode
429 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
431 if (trunc_optab->handlers[to_mode][full_mode].insn_code
435 if (full_mode != from_mode)
436 from = convert_to_mode (full_mode, from, unsignedp);
437 emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
441 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
443 enum machine_mode full_mode
444 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
446 if (sext_optab->handlers[full_mode][from_mode].insn_code
450 emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
452 if (to_mode == full_mode)
455 /* else proceed to integer conversions below. */
456 from_mode = full_mode;
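/* Editorial example: on a target with a partial-word PSImode whose full
   mode is SImode, converting PSImode to DImode first extends
   PSImode -> SImode here, then falls through to the ordinary
   SImode -> DImode integer extension below.  */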
459 /* Now both modes are integers. */
461 /* Handle expanding beyond a word. */
462 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
463 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
470 enum machine_mode lowpart_mode;
471 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
473 /* Try converting directly if the insn is supported. */
474 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
477 /* If FROM is a SUBREG, put it into a register. Do this
478 so that we always generate the same set of insns for
479 better cse'ing; if an intermediate assignment occurred,
480 we won't be doing the operation directly on the SUBREG. */
481 if (optimize > 0 && GET_CODE (from) == SUBREG)
482 from = force_reg (from_mode, from);
483 emit_unop_insn (code, to, from, equiv_code);
486 /* Next, try converting via full word. */
487 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
488 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
489 != CODE_FOR_nothing))
493 if (reg_overlap_mentioned_p (to, from))
494 from = force_reg (from_mode, from);
495 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
497 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
498 emit_unop_insn (code, to,
499 gen_lowpart (word_mode, to), equiv_code);
503 /* No special multiword conversion insn; do it by hand. */
506 /* Since we will turn this into a no conflict block, we must ensure
507 that the source does not overlap the target. */
509 if (reg_overlap_mentioned_p (to, from))
510 from = force_reg (from_mode, from);
512 /* Get a copy of FROM widened to a word, if necessary. */
513 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
514 lowpart_mode = word_mode;
516 lowpart_mode = from_mode;
518 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
520 lowpart = gen_lowpart (lowpart_mode, to);
521 emit_move_insn (lowpart, lowfrom);
523 /* Compute the value to put in each remaining word. */
525 fill_value = const0_rtx;
530 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
531 && STORE_FLAG_VALUE == -1)
533 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
535 fill_value = gen_reg_rtx (word_mode);
536 emit_insn (gen_slt (fill_value));
542 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
543 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
545 fill_value = convert_to_mode (word_mode, fill_value, 1);
549 /* Fill the remaining words. */
550 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
552 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
553 rtx subword = operand_subword (to, index, 1, to_mode);
558 if (fill_value != subword)
559 emit_move_insn (subword, fill_value);
562 insns = get_insns ();
565 emit_no_conflict_block (insns, to, from, NULL_RTX,
566 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
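/* Editorial example: widening SImode to DImode on a 32-bit target with
   no extend insn takes the path above: the low word of TO receives
   LOWFROM, and the remaining word receives FILL_VALUE, which is zero or
   the sign bits produced by an arithmetic right shift of 31; the whole
   sequence is wrapped in a no-conflict block whose recorded equivalent
   is a single sign_extend or zero_extend.  */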
570 /* Truncating multi-word to a word or less. */
571 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
572 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
575 && ! MEM_VOLATILE_P (from)
576 && direct_load[(int) to_mode]
577 && ! mode_dependent_address_p (XEXP (from, 0)))
579 || GET_CODE (from) == SUBREG))
580 from = force_reg (from_mode, from);
581 convert_move (to, gen_lowpart (word_mode, from), 0);
585 /* Now follow all the conversions between integers
586 no more than a word long. */
588 /* For truncation, usually we can just refer to FROM in a narrower mode. */
589 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
590 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
591 GET_MODE_BITSIZE (from_mode)))
594 && ! MEM_VOLATILE_P (from)
595 && direct_load[(int) to_mode]
596 && ! mode_dependent_address_p (XEXP (from, 0)))
598 || GET_CODE (from) == SUBREG))
599 from = force_reg (from_mode, from);
600 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
601 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
602 from = copy_to_reg (from);
603 emit_move_insn (to, gen_lowpart (to_mode, from));
607 /* Handle extension. */
608 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
610 /* Convert directly if that works. */
611 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
615 from = force_not_mem (from);
617 emit_unop_insn (code, to, from, equiv_code);
622 enum machine_mode intermediate;
626 /* Search for a mode to convert via. */
627 for (intermediate = from_mode; intermediate != VOIDmode;
628 intermediate = GET_MODE_WIDER_MODE (intermediate))
629 if (((can_extend_p (to_mode, intermediate, unsignedp)
631 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
632 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
633 GET_MODE_BITSIZE (intermediate))))
634 && (can_extend_p (intermediate, from_mode, unsignedp)
635 != CODE_FOR_nothing))
637 convert_move (to, convert_to_mode (intermediate, from,
638 unsignedp), unsignedp);
642 /* No suitable intermediate mode.
643 Generate what we need with shifts. */
644 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
645 - GET_MODE_BITSIZE (from_mode), 0);
646 from = gen_lowpart (to_mode, force_reg (from_mode, from));
647 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
649 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
652 emit_move_insn (to, tmp);
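/* Editorial example: extending QImode to HImode with no usable extend
   insn or intermediate mode reduces to the two shifts above: shift the
   byte left by 8 into the high half, then shift right by 8 (logically
   for zero extension, arithmetically for sign extension).  */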
657 /* Support special truncate insns for certain modes. */
658 if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
660 emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
665 /* Handle truncation of volatile memrefs, and so on;
666 the things that couldn't be truncated directly,
667 and for which there was no special instruction.
669 ??? Code above formerly short-circuited this, for most integer
670 mode pairs, with a force_reg in from_mode followed by a recursive
671 call to this routine. Appears always to have been wrong. */
672 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
674 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
675 emit_move_insn (to, temp);
679 /* Mode combination is not recognized. */
683 /* Return an rtx for a value that would result
684 from converting X to mode MODE.
685 Both X and MODE may be floating, or both integer.
686 UNSIGNEDP is nonzero if X is an unsigned value.
687 This can be done by referring to a part of X in place
688 or by copying to a new temporary with conversion. */
691 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
693 return convert_modes (mode, VOIDmode, x, unsignedp);
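/* Editorial usage note: convert_to_mode (DImode, x, 1), for example,
   converts X to DImode with zero extension when widening; it is
   shorthand for convert_modes (DImode, VOIDmode, x, 1), which may reuse
   part of X in place instead of copying.  */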
696 /* Return an rtx for a value that would result
697 from converting X from mode OLDMODE to mode MODE.
698 Both modes may be floating, or both integer.
699 UNSIGNEDP is nonzero if X is an unsigned value.
701 This can be done by referring to a part of X in place
702 or by copying to a new temporary with conversion.
704 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
707 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
711 /* If FROM is a SUBREG that indicates that we have already done at least
712 the required extension, strip it. */
714 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
715 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
716 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
717 x = gen_lowpart (mode, x);
719 if (GET_MODE (x) != VOIDmode)
720 oldmode = GET_MODE (x);
725 /* There is one case that we must handle specially: If we are converting
726 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
727 we are to interpret the constant as unsigned, gen_lowpart will do
728 the wrong thing if the constant appears negative. What we want to do is
729 make the high-order word of the constant zero, not all ones. */
731 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
732 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
733 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
735 HOST_WIDE_INT val = INTVAL (x);
737 if (oldmode != VOIDmode
738 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
740 int width = GET_MODE_BITSIZE (oldmode);
742 /* We need to zero extend VAL. */
743 val &= ((HOST_WIDE_INT) 1 << width) - 1;
746 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
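/* Editorial worked example: with a 32-bit HOST_WIDE_INT, converting
   (const_int -1) taken from SImode to an unsigned 64-bit mode must
   produce the pair (low 0xffffffff, high 0); plain gen_lowpart would
   have yielded all ones in the high word as well.  */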
749 /* We can do this with a gen_lowpart if both desired and current modes
750 are integer, and this is either a constant integer, a register, or a
751 non-volatile MEM. Except for the constant case where MODE is no
752 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
754 if ((GET_CODE (x) == CONST_INT
755 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
756 || (GET_MODE_CLASS (mode) == MODE_INT
757 && GET_MODE_CLASS (oldmode) == MODE_INT
758 && (GET_CODE (x) == CONST_DOUBLE
759 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
760 && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
761 && direct_load[(int) mode])
763 && (! HARD_REGISTER_P (x)
764 || HARD_REGNO_MODE_OK (REGNO (x), mode))
765 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
766 GET_MODE_BITSIZE (GET_MODE (x)))))))))
768 /* ??? If we don't know OLDMODE, we have to assume here that
769 X does not need sign- or zero-extension. This may not be
770 the case, but it's the best we can do. */
771 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
772 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
774 HOST_WIDE_INT val = INTVAL (x);
775 int width = GET_MODE_BITSIZE (oldmode);
777 /* We must sign or zero-extend in this case. Start by
778 zero-extending, then sign extend if we need to. */
779 val &= ((HOST_WIDE_INT) 1 << width) - 1;
781 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
782 val |= (HOST_WIDE_INT) (-1) << width;
784 return gen_int_mode (val, mode);
787 return gen_lowpart (mode, x);
790 /* Converting an integer constant into MODE is always equivalent to a subreg operation. */
792 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
794 if (GET_MODE_BITSIZE (mode) != GET_MODE_BITSIZE (oldmode))
796 return simplify_gen_subreg (mode, x, oldmode, 0);
799 temp = gen_reg_rtx (mode);
800 convert_move (temp, x, unsignedp);
804 /* STORE_MAX_PIECES is the number of bytes at a time that we can
805 store efficiently. Due to internal GCC limitations, this is
806 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
807 for an immediate constant. */
809 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
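/* Editorial example: with a 64-bit HOST_WIDE_INT, immediates of up to
   16 bytes can be represented, so STORE_MAX_PIECES is simply
   MOVE_MAX_PIECES capped at 16.  */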
811 /* Determine whether the LEN bytes can be moved by using several move
812 instructions. Return nonzero if a call to move_by_pieces should succeed. */
816 can_move_by_pieces (unsigned HOST_WIDE_INT len,
817 unsigned int align ATTRIBUTE_UNUSED)
819 return MOVE_BY_PIECES_P (len, align);
822 /* Generate several move instructions to copy LEN bytes from block FROM to
823 block TO. (These are MEM rtx's with BLKmode).
825 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
826 used to push FROM to the stack.
828 ALIGN is maximum stack alignment we can assume.
830 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
831 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala stpcpy. */
835 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
836 unsigned int align, int endp)
838 struct move_by_pieces data;
839 rtx to_addr, from_addr = XEXP (from, 0);
840 unsigned int max_size = MOVE_MAX_PIECES + 1;
841 enum machine_mode mode = VOIDmode, tmode;
842 enum insn_code icode;
844 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
847 data.from_addr = from_addr;
850 to_addr = XEXP (to, 0);
853 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
854 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
856 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
863 #ifdef STACK_GROWS_DOWNWARD
869 data.to_addr = to_addr;
872 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
873 || GET_CODE (from_addr) == POST_INC
874 || GET_CODE (from_addr) == POST_DEC);
876 data.explicit_inc_from = 0;
877 data.explicit_inc_to = 0;
878 if (data.reverse) data.offset = len;
881 /* If copying requires more than two move insns,
882 copy addresses to registers (to make displacements shorter)
883 and use post-increment if available. */
884 if (!(data.autinc_from && data.autinc_to)
885 && move_by_pieces_ninsns (len, align) > 2)
887 /* Find the mode of the largest move... */
888 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
889 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
890 if (GET_MODE_SIZE (tmode) < max_size)
893 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
895 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
896 data.autinc_from = 1;
897 data.explicit_inc_from = -1;
899 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
901 data.from_addr = copy_addr_to_reg (from_addr);
902 data.autinc_from = 1;
903 data.explicit_inc_from = 1;
905 if (!data.autinc_from && CONSTANT_P (from_addr))
906 data.from_addr = copy_addr_to_reg (from_addr);
907 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
909 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
911 data.explicit_inc_to = -1;
913 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
915 data.to_addr = copy_addr_to_reg (to_addr);
917 data.explicit_inc_to = 1;
919 if (!data.autinc_to && CONSTANT_P (to_addr))
920 data.to_addr = copy_addr_to_reg (to_addr);
923 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
924 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
925 align = MOVE_MAX * BITS_PER_UNIT;
927 /* First move what we can in the largest integer mode, then go to
928 successively smaller modes. */
932 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
933 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
934 if (GET_MODE_SIZE (tmode) < max_size)
937 if (mode == VOIDmode)
940 icode = mov_optab->handlers[(int) mode].insn_code;
941 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
942 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
944 max_size = GET_MODE_SIZE (mode);
947 /* The code above should have handled everything. */
961 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
962 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
964 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
967 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
974 to1 = adjust_address (data.to, QImode, data.offset);
982 /* Return number of insns required to move L bytes by pieces.
983 ALIGN (in bits) is maximum alignment we can assume. */
985 static unsigned HOST_WIDE_INT
986 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align)
988 unsigned HOST_WIDE_INT n_insns = 0;
989 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
991 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
992 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
993 align = MOVE_MAX * BITS_PER_UNIT;
997 enum machine_mode mode = VOIDmode, tmode;
998 enum insn_code icode;
1000 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1001 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1002 if (GET_MODE_SIZE (tmode) < max_size)
1005 if (mode == VOIDmode)
1008 icode = mov_optab->handlers[(int) mode].insn_code;
1009 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1010 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1012 max_size = GET_MODE_SIZE (mode);
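/* Editorial worked example: for l = 7 at 4-byte alignment on a target
   with 4-byte words, the loop above counts one SImode move, one HImode
   move and one QImode move, so move_by_pieces_ninsns returns 3.  */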
1020 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1021 with move instructions for mode MODE. GENFUN is the gen_... function
1022 to make a move insn for that mode. DATA has all the other info. */
1025 move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
1026 struct move_by_pieces *data)
1028 unsigned int size = GET_MODE_SIZE (mode);
1029 rtx to1 = NULL_RTX, from1;
1031 while (data->len >= size)
1034 data->offset -= size;
1038 if (data->autinc_to)
1039 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1042 to1 = adjust_address (data->to, mode, data->offset);
1045 if (data->autinc_from)
1046 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1049 from1 = adjust_address (data->from, mode, data->offset);
1051 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1052 emit_insn (gen_add2_insn (data->to_addr,
1053 GEN_INT (-(HOST_WIDE_INT)size)));
1054 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1055 emit_insn (gen_add2_insn (data->from_addr,
1056 GEN_INT (-(HOST_WIDE_INT)size)));
1059 emit_insn ((*genfun) (to1, from1));
1062 #ifdef PUSH_ROUNDING
1063 emit_single_push_insn (mode, from1, NULL);
1069 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1070 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1071 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1072 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1074 if (! data->reverse)
1075 data->offset += size;
1081 /* Emit code to move a block Y to a block X. This may be done with
1082 string-move instructions, with multiple scalar move instructions,
1083 or with a library call.
1085 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1086 SIZE is an rtx that says how long they are.
1087 ALIGN is the maximum alignment we can assume they have.
1088 METHOD describes what kind of copy this is, and what mechanisms may be used.
1090 Return the address of the new block, if memcpy is called and returns it, 0 otherwise. */
1094 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1102 case BLOCK_OP_NORMAL:
1103 may_use_call = true;
1106 case BLOCK_OP_CALL_PARM:
1107 may_use_call = block_move_libcall_safe_for_call_parm ();
1109 /* Make inhibit_defer_pop nonzero around the library call
1110 to force it to pop the arguments right away. */
1114 case BLOCK_OP_NO_LIBCALL:
1115 may_use_call = false;
1122 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1131 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1132 block copy is more efficient for other large modes, e.g. DCmode. */
1133 x = adjust_address (x, BLKmode, 0);
1134 y = adjust_address (y, BLKmode, 0);
1136 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1137 can be incorrect is coming from __builtin_memcpy. */
1138 if (GET_CODE (size) == CONST_INT)
1140 if (INTVAL (size) == 0)
1143 x = shallow_copy_rtx (x);
1144 y = shallow_copy_rtx (y);
1145 set_mem_size (x, size);
1146 set_mem_size (y, size);
1149 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1150 move_by_pieces (x, y, INTVAL (size), align, 0);
1151 else if (emit_block_move_via_movmem (x, y, size, align))
1153 else if (may_use_call)
1154 retval = emit_block_move_via_libcall (x, y, size);
1156 emit_block_move_via_loop (x, y, size, align);
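/* Editorial summary: the cascade above tries, in order, an unrolled
   by-pieces copy for small constant sizes, a target movmem pattern,
   a call to memcpy where METHOD permits it, and finally the
   byte-at-a-time loop, the one strategy that can never fail.  */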
1158 if (method == BLOCK_OP_CALL_PARM)
1164 /* A subroutine of emit_block_move. Returns true if calling the
1165 block move libcall will not clobber any parameters which may have
1166 already been placed on the stack. */
1169 block_move_libcall_safe_for_call_parm (void)
1171 /* If arguments are pushed on the stack, then they're safe. */
1175 /* If registers go on the stack anyway, any argument is sure to clobber
1176 an outgoing argument. */
1177 #if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
1179 tree fn = emit_block_move_libcall_fn (false);
1181 if (REG_PARM_STACK_SPACE (fn) != 0)
1186 /* If any argument goes in memory, then it might clobber an outgoing argument. */
1189 CUMULATIVE_ARGS args_so_far;
1192 fn = emit_block_move_libcall_fn (false);
1193 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);
1195 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1196 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1198 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1199 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1200 if (!tmp || !REG_P (tmp))
1202 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
1205 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1211 /* A subroutine of emit_block_move. Expand a movmem pattern;
1212 return true if successful. */
1215 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align)
1217 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1218 int save_volatile_ok = volatile_ok;
1219 enum machine_mode mode;
1221 /* Since this is a move insn, we don't care about volatility. */
1224 /* Try the most limited insn first, because there's no point
1225 including more than one in the machine description unless
1226 the more limited one has some advantage. */
1228 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1229 mode = GET_MODE_WIDER_MODE (mode))
1231 enum insn_code code = movmem_optab[(int) mode];
1232 insn_operand_predicate_fn pred;
1234 if (code != CODE_FOR_nothing
1235 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1236 here because if SIZE is less than the mode mask, as it is
1237 returned by the macro, it will definitely be less than the
1238 actual mode mask. */
1239 && ((GET_CODE (size) == CONST_INT
1240 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1241 <= (GET_MODE_MASK (mode) >> 1)))
1242 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1243 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1244 || (*pred) (x, BLKmode))
1245 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1246 || (*pred) (y, BLKmode))
1247 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1248 || (*pred) (opalign, VOIDmode)))
1251 rtx last = get_last_insn ();
1254 op2 = convert_to_mode (mode, size, 1);
1255 pred = insn_data[(int) code].operand[2].predicate;
1256 if (pred != 0 && ! (*pred) (op2, mode))
1257 op2 = copy_to_mode_reg (mode, op2);
1259 /* ??? When called via emit_block_move_for_call, it'd be
1260 nice if there were some way to inform the backend, so
1261 that it doesn't fail the expansion because it thinks
1262 emitting the libcall would be more efficient. */
1264 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1268 volatile_ok = save_volatile_ok;
1272 delete_insns_since (last);
1276 volatile_ok = save_volatile_ok;
1280 /* A subroutine of emit_block_move. Expand a call to memcpy.
1281 Return the return value from memcpy, 0 otherwise. */
1284 emit_block_move_via_libcall (rtx dst, rtx src, rtx size)
1286 rtx dst_addr, src_addr;
1287 tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
1288 enum machine_mode size_mode;
1291 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1292 pseudos. We can then place those new pseudos into a VAR_DECL and use them later. */
1295 dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1296 src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
1298 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1299 src_addr = convert_memory_address (ptr_mode, src_addr);
1301 dst_tree = make_tree (ptr_type_node, dst_addr);
1302 src_tree = make_tree (ptr_type_node, src_addr);
1304 size_mode = TYPE_MODE (sizetype);
1306 size = convert_to_mode (size_mode, size, 1);
1307 size = copy_to_mode_reg (size_mode, size);
1309 /* It is incorrect to use the libcall calling conventions to call
1310 memcpy in this context. This could be a user call to memcpy and
1311 the user may wish to examine the return value from memcpy. For
1312 targets where libcalls and normal calls have different conventions
1313 for returning pointers, we could end up generating incorrect code. */
1315 size_tree = make_tree (sizetype, size);
1317 fn = emit_block_move_libcall_fn (true);
1318 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
1319 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1320 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1322 /* Now we have to build up the CALL_EXPR itself. */
1323 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1324 call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1325 call_expr, arg_list, NULL_TREE);
1327 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1329 /* If we are initializing a readonly value, show the above call clobbered
1330 it. Otherwise, a load from it may erroneously be hoisted from a loop, or
1331 the delay slot scheduler might overlook conflicts and take nasty decisions. */
1333 if (RTX_UNCHANGING_P (dst))
1334 add_function_usage_to
1335 (last_call_insn (), gen_rtx_EXPR_LIST (VOIDmode,
1336 gen_rtx_CLOBBER (VOIDmode, dst),
1342 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1343 for the function we use for block copies. The first time FOR_CALL
1344 is true, we call assemble_external. */
1346 static GTY(()) tree block_move_fn;
1349 init_block_move_fn (const char *asmspec)
1355 fn = get_identifier ("memcpy");
1356 args = build_function_type_list (ptr_type_node, ptr_type_node,
1357 const_ptr_type_node, sizetype,
1360 fn = build_decl (FUNCTION_DECL, fn, args);
1361 DECL_EXTERNAL (fn) = 1;
1362 TREE_PUBLIC (fn) = 1;
1363 DECL_ARTIFICIAL (fn) = 1;
1364 TREE_NOTHROW (fn) = 1;
1370 set_user_assembler_name (block_move_fn, asmspec);
1374 emit_block_move_libcall_fn (int for_call)
1376 static bool emitted_extern;
1379 init_block_move_fn (NULL);
1381 if (for_call && !emitted_extern)
1383 emitted_extern = true;
1384 make_decl_rtl (block_move_fn);
1385 assemble_external (block_move_fn);
1388 return block_move_fn;
1391 /* A subroutine of emit_block_move. Copy the data via an explicit
1392 loop. This is used only when libcalls are forbidden. */
1393 /* ??? It'd be nice to copy in hunks larger than QImode. */
1396 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1397 unsigned int align ATTRIBUTE_UNUSED)
1399 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1400 enum machine_mode iter_mode;
1402 iter_mode = GET_MODE (size);
1403 if (iter_mode == VOIDmode)
1404 iter_mode = word_mode;
1406 top_label = gen_label_rtx ();
1407 cmp_label = gen_label_rtx ();
1408 iter = gen_reg_rtx (iter_mode);
1410 emit_move_insn (iter, const0_rtx);
1412 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1413 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1414 do_pending_stack_adjust ();
1416 emit_jump (cmp_label);
1417 emit_label (top_label);
1419 tmp = convert_modes (Pmode, iter_mode, iter, true);
1420 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
1421 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
1422 x = change_address (x, QImode, x_addr);
1423 y = change_address (y, QImode, y_addr);
1425 emit_move_insn (x, y);
1427 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1428 true, OPTAB_LIB_WIDEN);
1430 emit_move_insn (iter, tmp);
1432 emit_label (cmp_label);
1434 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode, true, top_label);
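/* Editorial sketch (not original code): the RTL emitted above is
   equivalent to this C, with SIZE a run-time byte count:

       i = 0;
       goto cmp;
     top:
       ((char *) x)[i] = ((char *) y)[i];
       i++;
     cmp:
       if (i < size)
         goto top;
*/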
1438 /* Copy all or part of a value X into registers starting at REGNO.
1439 The number of registers to be filled is NREGS. */
1442 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1445 #ifdef HAVE_load_multiple
1453 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1454 x = validize_mem (force_const_mem (mode, x));
1456 /* See if the machine can do this with a load multiple insn. */
1457 #ifdef HAVE_load_multiple
1458 if (HAVE_load_multiple)
1460 last = get_last_insn ();
1461 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1469 delete_insns_since (last);
1473 for (i = 0; i < nregs; i++)
1474 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1475 operand_subword_force (x, i, mode));
1478 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1479 The number of registers to be filled is NREGS. */
1482 move_block_from_reg (int regno, rtx x, int nregs)
1489 /* See if the machine can do this with a store multiple insn. */
1490 #ifdef HAVE_store_multiple
1491 if (HAVE_store_multiple)
1493 rtx last = get_last_insn ();
1494 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1502 delete_insns_since (last);
1506 for (i = 0; i < nregs; i++)
1508 rtx tem = operand_subword (x, i, 1, BLKmode);
1513 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1517 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1518 ORIG, where ORIG is a non-consecutive group of registers represented by
1519 a PARALLEL. The clone is identical to the original except in that the
1520 original set of registers is replaced by a new set of pseudo registers.
1521 The new set has the same modes as the original set. */
1524 gen_group_rtx (rtx orig)
1529 if (GET_CODE (orig) != PARALLEL)
1532 length = XVECLEN (orig, 0);
1533 tmps = alloca (sizeof (rtx) * length);
1535 /* Skip a NULL entry in first slot. */
1536 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1541 for (; i < length; i++)
1543 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1544 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1546 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1549 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
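/* Editorial example: a group such as

       (parallel [(expr_list (reg:DI 3) (const_int 0))
                  (expr_list (reg:DI 4) (const_int 8))])

   describes a value whose first eight bytes live in register 3 and whose
   next eight live in register 4; gen_group_rtx clones it with fresh
   pseudos of the same modes at the same offsets.  */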
1552 /* Emit code to move a block ORIG_SRC of type TYPE to a block DST,
1553 where DST is non-consecutive registers represented by a PARALLEL.
1554 SSIZE represents the total size of block ORIG_SRC in bytes, or -1 if not known. */
1558 emit_group_load (rtx dst, rtx orig_src, tree type ATTRIBUTE_UNUSED, int ssize)
1563 if (GET_CODE (dst) != PARALLEL)
1566 /* Check for a NULL entry, used to indicate that the parameter goes
1567 both on the stack and in registers. */
1568 if (XEXP (XVECEXP (dst, 0, 0), 0))
1573 tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
1575 /* Process the pieces. */
1576 for (i = start; i < XVECLEN (dst, 0); i++)
1578 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1579 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1580 unsigned int bytelen = GET_MODE_SIZE (mode);
1583 /* Handle trailing fragments that run over the size of the struct. */
1584 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1586 /* Arrange to shift the fragment to where it belongs.
1587 extract_bit_field loads to the lsb of the reg. */
1589 #ifdef BLOCK_REG_PADDING
1590 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1591 == (BYTES_BIG_ENDIAN ? upward : downward)
1596 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1597 bytelen = ssize - bytepos;
1602 /* If we won't be loading directly from memory, protect the real source
1603 from strange tricks we might play; but make sure that the source can
1604 be loaded directly into the destination. */
1606 if (!MEM_P (orig_src)
1607 && (!CONSTANT_P (orig_src)
1608 || (GET_MODE (orig_src) != mode
1609 && GET_MODE (orig_src) != VOIDmode)))
1611 if (GET_MODE (orig_src) == VOIDmode)
1612 src = gen_reg_rtx (mode);
1614 src = gen_reg_rtx (GET_MODE (orig_src));
1616 emit_move_insn (src, orig_src);
1619 /* Optimize the access just a bit. */
1621 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1622 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1623 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1624 && bytelen == GET_MODE_SIZE (mode))
1626 tmps[i] = gen_reg_rtx (mode);
1627 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1629 else if (GET_CODE (src) == CONCAT)
1631 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1632 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1634 if ((bytepos == 0 && bytelen == slen0)
1635 || (bytepos != 0 && bytepos + bytelen <= slen))
1637 /* The following assumes that the concatenated objects all
1638 have the same size. In this case, a simple calculation
1639 can be used to determine the object and the bit field to be extracted. */
1641 tmps[i] = XEXP (src, bytepos / slen0);
1642 if (! CONSTANT_P (tmps[i])
1643 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1644 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1645 (bytepos % slen0) * BITS_PER_UNIT,
1646 1, NULL_RTX, mode, mode);
1648 else if (bytepos == 0)
1650 rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
1651 emit_move_insn (mem, src);
1652 tmps[i] = adjust_address (mem, mode, 0);
1657 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1658 SIMD register, which is currently broken. While we get GCC
1659 to emit proper RTL for these cases, let's dump to memory. */
1660 else if (VECTOR_MODE_P (GET_MODE (dst))
1663 int slen = GET_MODE_SIZE (GET_MODE (src));
1666 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1667 emit_move_insn (mem, src);
1668 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1670 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1671 && XVECLEN (dst, 0) > 1)
1672 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
1673 else if (CONSTANT_P (src)
1674 || (REG_P (src) && GET_MODE (src) == mode))
1677 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1678 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1682 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1683 build_int_2 (shift, 0), tmps[i], 0);
1686 /* Copy the extracted pieces into the proper (probable) hard regs. */
1687 for (i = start; i < XVECLEN (dst, 0); i++)
1688 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
1691 /* Emit code to move a block SRC to block DST, where SRC and DST are
1692 non-consecutive groups of registers, each represented by a PARALLEL. */
1695 emit_group_move (rtx dst, rtx src)
1699 if (GET_CODE (src) != PARALLEL
1700 || GET_CODE (dst) != PARALLEL
1701 || XVECLEN (src, 0) != XVECLEN (dst, 0))
1704 /* Skip first entry if NULL. */
1705 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1706 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1707 XEXP (XVECEXP (src, 0, i), 0));
1710 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1711 where SRC is non-consecutive registers represented by a PARALLEL.
1712 SSIZE represents the total size of block ORIG_DST, or -1 if not known. */
1716 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1721 if (GET_CODE (src) != PARALLEL)
1724 /* Check for a NULL entry, used to indicate that the parameter goes
1725 both on the stack and in registers. */
1726 if (XEXP (XVECEXP (src, 0, 0), 0))
1731 tmps = alloca (sizeof (rtx) * XVECLEN (src, 0));
1733 /* Copy the (probable) hard regs into pseudos. */
1734 for (i = start; i < XVECLEN (src, 0); i++)
1736 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1737 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1738 emit_move_insn (tmps[i], reg);
1741 /* If we won't be storing directly into memory, protect the real destination
1742 from strange tricks we might play. */
1744 if (GET_CODE (dst) == PARALLEL)
1748 /* We can get a PARALLEL dst if there is a conditional expression in
1749 a return statement. In that case, the dst and src are the same,
1750 so no action is necessary. */
1751 if (rtx_equal_p (dst, src))
1754 /* It is unclear if we can ever reach here, but we may as well handle
1755 it. Allocate a temporary, and split this into a store/load to/from the temporary. */
1758 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
1759 emit_group_store (temp, src, type, ssize);
1760 emit_group_load (dst, temp, type, ssize);
1763 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1765 dst = gen_reg_rtx (GET_MODE (orig_dst));
1766 /* Make life a bit easier for combine. */
1767 emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
1770 /* Process the pieces. */
1771 for (i = start; i < XVECLEN (src, 0); i++)
1773 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
1774 enum machine_mode mode = GET_MODE (tmps[i]);
1775 unsigned int bytelen = GET_MODE_SIZE (mode);
1778 /* Handle trailing fragments that run over the size of the struct. */
1779 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1781 /* store_bit_field always takes its value from the lsb.
1782 Move the fragment to the lsb if it's not already there. */
1784 #ifdef BLOCK_REG_PADDING
1785 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
1786 == (BYTES_BIG_ENDIAN ? upward : downward)
1792 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1793 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
1794 build_int_2 (shift, 0), tmps[i], 0);
1796 bytelen = ssize - bytepos;
1799 if (GET_CODE (dst) == CONCAT)
1801 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
1802 dest = XEXP (dst, 0);
1803 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
1805 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
1806 dest = XEXP (dst, 1);
1808 else if (bytepos == 0 && XVECLEN (src, 0))
1810 dest = assign_stack_temp (GET_MODE (dest),
1811 GET_MODE_SIZE (GET_MODE (dest)), 0);
1812 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
1821 /* Optimize the access just a bit. */
1823 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
1824 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
1825 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1826 && bytelen == GET_MODE_SIZE (mode))
1827 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
1829 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
1833 /* Copy from the pseudo into the (probable) hard reg. */
1834 if (orig_dst != dst)
1835 emit_move_insn (orig_dst, dst);
1838 /* Generate code to copy a BLKmode object of TYPE out of a
1839 set of registers starting with SRCREG into TGTBLK. If TGTBLK
1840 is null, a stack temporary is created. TGTBLK is returned.
1842 The purpose of this routine is to handle functions that return
1843 BLKmode structures in registers. Some machines (the PA for example)
1844 want to return all small structures in registers regardless of the
1845 structure's alignment. */
1848 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
1850 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
1851 rtx src = NULL, dst = NULL;
1852 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
1853 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
1857 tgtblk = assign_temp (build_qualified_type (type,
1859 | TYPE_QUAL_CONST)),
1861 preserve_temp_slots (tgtblk);
1864 /* This code assumes srcreg is at least a full word. If it isn't, copy it
1865 into a new pseudo which is a full word. */
1867 if (GET_MODE (srcreg) != BLKmode
1868 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
1869 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
1871 /* If the structure doesn't take up a whole number of words, see whether
1872 SRCREG is padded on the left or on the right. If it's on the left,
1873 set PADDING_CORRECTION to the number of bits to skip.
1875 In most ABIs, the structure will be returned at the least significant end of
1876 the register, which translates to right padding on little-endian
1877 targets and left padding on big-endian targets. The opposite
1878 holds if the structure is returned at the most significant
1879 end of the register. */
1880 if (bytes % UNITS_PER_WORD != 0
1881 && (targetm.calls.return_in_msb (type)
1883 : BYTES_BIG_ENDIAN))
1885 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
1887 /* Copy the structure BITSIZE bits at a time.
1889 We could probably emit more efficient code for machines which do not use
1890 strict alignment, but it doesn't seem worth the effort at the current time. */
1892 for (bitpos = 0, xbitpos = padding_correction;
1893 bitpos < bytes * BITS_PER_UNIT;
1894 bitpos += bitsize, xbitpos += bitsize)
1896 /* We need a new source operand each time xbitpos is on a
1897 word boundary and when xbitpos == padding_correction
1898 (the first time through). */
1899 if (xbitpos % BITS_PER_WORD == 0
1900 || xbitpos == padding_correction)
1901 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
1904 /* We need a new destination operand each time bitpos is on a word boundary. */
1906 if (bitpos % BITS_PER_WORD == 0)
1907 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
1909 /* Use xbitpos for the source extraction (right justified) and
1910 bitpos for the destination store (left justified). */
1911 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
1912 extract_bit_field (src, bitsize,
1913 xbitpos % BITS_PER_WORD, 1,
1914 NULL_RTX, word_mode, word_mode));
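/* Editorial worked example: on a big-endian target with 8-byte words,
   a 6-byte structure returned at the least significant end of a register
   is left-padded, so padding_correction above is 64 - 48 = 16 and
   extraction starts at bit 16 of SRCREG while stores start at bit 0 of
   TGTBLK.  */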
1920 /* Add a USE expression for REG to the (possibly empty) list pointed
1921 to by CALL_FUSAGE. REG must denote a hard register. */
1924 use_reg (rtx *call_fusage, rtx reg)
1927 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
1931 = gen_rtx_EXPR_LIST (VOIDmode,
1932 gen_rtx_USE (VOIDmode, reg), *call_fusage);
1935 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
1936 starting at REGNO. All of these registers must be hard registers. */
1939 use_regs (rtx *call_fusage, int regno, int nregs)
1943 if (regno + nregs > FIRST_PSEUDO_REGISTER)
1946 for (i = 0; i < nregs; i++)
1947 use_reg (call_fusage, regno_reg_rtx[regno + i]);
1950 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
1951 PARALLEL REGS. This is for calls that pass values in multiple
1952 non-contiguous locations. The Irix 6 ABI has examples of this. */
1955 use_group_regs (rtx *call_fusage, rtx regs)
1959 for (i = 0; i < XVECLEN (regs, 0); i++)
1961 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
1963 /* A NULL entry means the parameter goes both on the stack and in
1964 registers. This can also be a MEM for targets that pass values
1965 partially on the stack and partially in registers. */
1966 if (reg != 0 && REG_P (reg))
1967 use_reg (call_fusage, reg);
1972 /* Determine whether the LEN bytes generated by CONSTFUN can be
1973 stored to memory using several move instructions. CONSTFUNDATA is
1974 a pointer which will be passed as argument in every CONSTFUN call.
1975 ALIGN is maximum alignment we can assume. Return nonzero if a
1976 call to store_by_pieces should succeed. */
1979 can_store_by_pieces (unsigned HOST_WIDE_INT len,
1980 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
1981 void *constfundata, unsigned int align)
1983 unsigned HOST_WIDE_INT max_size, l;
1984 HOST_WIDE_INT offset = 0;
1985 enum machine_mode mode, tmode;
1986 enum insn_code icode;
1993 if (! STORE_BY_PIECES_P (len, align))
1996 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1997 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1998 align = MOVE_MAX * BITS_PER_UNIT;
2000 /* We would first store what we can in the largest integer mode, then go to
2001 successively smaller modes. */
2004 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2009 max_size = STORE_MAX_PIECES + 1;
2010 while (max_size > 1)
2012 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2013 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2014 if (GET_MODE_SIZE (tmode) < max_size)
2017 if (mode == VOIDmode)
2020 icode = mov_optab->handlers[(int) mode].insn_code;
2021 if (icode != CODE_FOR_nothing
2022 && align >= GET_MODE_ALIGNMENT (mode))
2024 unsigned int size = GET_MODE_SIZE (mode);
2031 cst = (*constfun) (constfundata, offset, mode);
2032 if (!LEGITIMATE_CONSTANT_P (cst))
2042 max_size = GET_MODE_SIZE (mode);
2045 /* The code above should have handled everything. */
2053 /* Generate several move instructions to store LEN bytes generated by
2054 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2055 pointer which will be passed as argument in every CONSTFUN call.
2056 ALIGN is maximum alignment we can assume.
2057 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
2058 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala stpcpy. */
2062 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2063 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2064 void *constfundata, unsigned int align, int endp)
2066 struct store_by_pieces data;
2075 if (! STORE_BY_PIECES_P (len, align))
2077 data.constfun = constfun;
2078 data.constfundata = constfundata;
2081 store_by_pieces_1 (&data, align);
2092 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2093 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2095 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2098 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2105 to1 = adjust_address (data.to, QImode, data.offset);
2113 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2114 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2117 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2119 struct store_by_pieces data;
2124 data.constfun = clear_by_pieces_1;
2125 data.constfundata = NULL;
2128 store_by_pieces_1 (&data, align);
2131 /* Callback routine for clear_by_pieces.
2132 Return const0_rtx unconditionally. */
2135 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2136 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2137 enum machine_mode mode ATTRIBUTE_UNUSED)
2142 /* Subroutine of clear_by_pieces and store_by_pieces.
2143 Generate several move instructions to store LEN bytes of block TO. (A MEM
2144 rtx with BLKmode). ALIGN is the maximum alignment we can assume. */
2147 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2148 unsigned int align ATTRIBUTE_UNUSED)
2150 rtx to_addr = XEXP (data->to, 0);
2151 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
2152 enum machine_mode mode = VOIDmode, tmode;
2153 enum insn_code icode;
2156 data->to_addr = to_addr;
2158 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2159 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2161 data->explicit_inc_to = 0;
2163 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2165 data->offset = data->len;
2167 /* If storing requires more than two move insns,
2168 copy addresses to registers (to make displacements shorter)
2169 and use post-increment if available. */
2170 if (!data->autinc_to
2171 && move_by_pieces_ninsns (data->len, align) > 2)
2173 /* Determine the main mode we'll be using. */
2174 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2175 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2176 if (GET_MODE_SIZE (tmode) < max_size)
2179 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2181 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2182 data->autinc_to = 1;
2183 data->explicit_inc_to = -1;
2186 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2187 && ! data->autinc_to)
2189 data->to_addr = copy_addr_to_reg (to_addr);
2190 data->autinc_to = 1;
2191 data->explicit_inc_to = 1;
2194 if (!data->autinc_to && CONSTANT_P (to_addr))
2195 data->to_addr = copy_addr_to_reg (to_addr);
2198 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2199 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2200 align = MOVE_MAX * BITS_PER_UNIT;
2202 /* First store what we can in the largest integer mode, then go to
2203 successively smaller modes. */
2205 while (max_size > 1)
2207 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2208 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2209 if (GET_MODE_SIZE (tmode) < max_size)
2212 if (mode == VOIDmode)
2215 icode = mov_optab->handlers[(int) mode].insn_code;
2216 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2217 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2219 max_size = GET_MODE_SIZE (mode);
2222 /* The code above should have handled everything. */
2227 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2228 with move instructions for mode MODE. GENFUN is the gen_... function
2229 to make a move insn for that mode. DATA has all the other info. */
2232 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2233 struct store_by_pieces *data)
2235 unsigned int size = GET_MODE_SIZE (mode);
2238 while (data->len >= size)
2241 data->offset -= size;
2243 if (data->autinc_to)
2244 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2247 to1 = adjust_address (data->to, mode, data->offset);
2249 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2250 emit_insn (gen_add2_insn (data->to_addr,
2251 GEN_INT (-(HOST_WIDE_INT) size)));
2253 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2254 emit_insn ((*genfun) (to1, cst));
2256 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2257 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2259 if (! data->reverse)
2260 data->offset += size;
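/* Editorial worked example, not part of the original source: assuming a
   64-bit target with STORE_MAX_PIECES == 8 and sufficient alignment,
   storing LEN == 11 bytes proceeds widest mode first: one DImode store
   covers 8 bytes, SImode is skipped because only 3 bytes remain, one
   HImode store covers 2, and one QImode store covers the final byte;
   11 = 8 + 2 + 1, three insns in all.  */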
2266 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2267 its length in bytes. */
2270 clear_storage (rtx object, rtx size)
2273 unsigned int align = (MEM_P (object) ? MEM_ALIGN (object)
2274 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2276 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2277 just move a zero. Otherwise, do this a piece at a time. */
2278 if (GET_MODE (object) != BLKmode
2279 && GET_CODE (size) == CONST_INT
2280 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
2281 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2284 if (size == const0_rtx)
2286 else if (GET_CODE (size) == CONST_INT
2287 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2288 clear_by_pieces (object, INTVAL (size), align);
2289 else if (clear_storage_via_clrmem (object, size, align))
2292 retval = clear_storage_via_libcall (object, size);
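/* Editorial summary, not part of the original source: the dispatch above
   goes from most to least specific.  For, say, clearing a 32-byte
   aggregate with constant SIZE on a typical port: a single move of
   CONST0_RTX if the object has a non-BLK mode of exactly that size,
   otherwise inline stores when CLEAR_BY_PIECES_P approves, otherwise a
   clrmem expander if the backend provides one, and finally a real call
   to memset.  */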
2298 /* A subroutine of clear_storage. Expand a clrmem pattern;
2299 return true if successful. */
2302 clear_storage_via_clrmem (rtx object, rtx size, unsigned int align)
2304 /* Try the most limited insn first, because there's no point
2305 including more than one in the machine description unless
2306 the more limited one has some advantage. */
2308 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2309 enum machine_mode mode;
2311 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2312 mode = GET_MODE_WIDER_MODE (mode))
2314 enum insn_code code = clrmem_optab[(int) mode];
2315 insn_operand_predicate_fn pred;
2317 if (code != CODE_FOR_nothing
2318 /* We don't need MODE to be narrower than
2319 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2320 the mode mask, as it is returned by the macro, it will
2321 definitely be less than the actual mode mask. */
2322 && ((GET_CODE (size) == CONST_INT
2323 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2324 <= (GET_MODE_MASK (mode) >> 1)))
2325 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2326 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2327 || (*pred) (object, BLKmode))
2328 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2329 || (*pred) (opalign, VOIDmode)))
2332 rtx last = get_last_insn ();
2335 op1 = convert_to_mode (mode, size, 1);
2336 pred = insn_data[(int) code].operand[1].predicate;
2337 if (pred != 0 && ! (*pred) (op1, mode))
2338 op1 = copy_to_mode_reg (mode, op1);
2340 pat = GEN_FCN ((int) code) (object, op1, opalign);
2347 delete_insns_since (last);
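/* Editorial sketch, not part of the original source: a backend opts in
   by defining an expander following the clrmemM naming convention, e.g.
   a hypothetical "clrmemsi" whose operands match the call above:
   operand 0 is the BLKmode destination, operand 1 the SImode length,
   operand 2 the alignment in bytes.  If the expander FAILs (so that
   GEN_FCN returns 0), the insns emitted so far are deleted and
   clear_storage falls back to the memset libcall.  */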
2354 /* A subroutine of clear_storage. Expand a call to memset.
2355 Return the return value of memset, or 0 otherwise. */
2358 clear_storage_via_libcall (rtx object, rtx size)
2360 tree call_expr, arg_list, fn, object_tree, size_tree;
2361 enum machine_mode size_mode;
2364 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2365 place those new pseudos into a VAR_DECL and use them later. */
2367 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2369 size_mode = TYPE_MODE (sizetype);
2370 size = convert_to_mode (size_mode, size, 1);
2371 size = copy_to_mode_reg (size_mode, size);
2373 /* It is incorrect to use the libcall calling conventions to call
2374 memset in this context. This could be a user call to memset and
2375 the user may wish to examine the return value from memset. For
2376 targets where libcalls and normal calls have different conventions
2377 for returning pointers, we could end up generating incorrect code. */
2379 object_tree = make_tree (ptr_type_node, object);
2380 size_tree = make_tree (sizetype, size);
2382 fn = clear_storage_libcall_fn (true);
2383 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2384 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
2385 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2387 /* Now we have to build up the CALL_EXPR itself. */
2388 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2389 call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2390 call_expr, arg_list, NULL_TREE);
2392 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2394 /* If we are initializing a readonly value, show the above call
2395 clobbered it. Otherwise, a load from it may erroneously be
2396 hoisted from a loop. */
2397 if (RTX_UNCHANGING_P (object))
2398 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
2403 /* A subroutine of clear_storage_via_libcall. Create the tree node
2404 for the function we use for block clears. The first time FOR_CALL
2405 is true, we call assemble_external. */
2407 static GTY(()) tree block_clear_fn;
2410 init_block_clear_fn (const char *asmspec)
2412 if (!block_clear_fn)
2416 fn = get_identifier ("memset");
2417 args = build_function_type_list (ptr_type_node, ptr_type_node,
2418 integer_type_node, sizetype,
2421 fn = build_decl (FUNCTION_DECL, fn, args);
2422 DECL_EXTERNAL (fn) = 1;
2423 TREE_PUBLIC (fn) = 1;
2424 DECL_ARTIFICIAL (fn) = 1;
2425 TREE_NOTHROW (fn) = 1;
2427 block_clear_fn = fn;
2431 set_user_assembler_name (block_clear_fn, asmspec);
2435 clear_storage_libcall_fn (int for_call)
2437 static bool emitted_extern;
2439 if (!block_clear_fn)
2440 init_block_clear_fn (NULL);
2442 if (for_call && !emitted_extern)
2444 emitted_extern = true;
2445 make_decl_rtl (block_clear_fn);
2446 assemble_external (block_clear_fn);
2449 return block_clear_fn;
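/* Editorial note, not part of the original source: the net effect of the
   two functions above is a call built with the target's normal ABI,
   equivalent to the C fragment

       retval = memset (object, 0, size);

   so the return value is usable and the call is indistinguishable from
   a user's own memset call, as the comment in clear_storage_via_libcall
   requires.  */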
2452 /* Generate code to copy Y into X.
2453 Both Y and X must have the same mode, except that
2454 Y can be a constant with VOIDmode.
2455 This mode cannot be BLKmode; use emit_block_move for that.
2457 Return the last instruction emitted. */
2460 emit_move_insn (rtx x, rtx y)
2462 enum machine_mode mode = GET_MODE (x);
2463 rtx y_cst = NULL_RTX;
2466 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2472 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
2473 && (last_insn = compress_float_constant (x, y)))
2478 if (!LEGITIMATE_CONSTANT_P (y))
2480 y = force_const_mem (mode, y);
2482 /* If the target's cannot_force_const_mem prevented the spill,
2483 assume that the target's move expanders will also take care
2484 of the non-legitimate constant. */
2490 /* If X or Y are memory references, verify that their addresses are valid for the machine. */
2493 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2494 && ! push_operand (x, GET_MODE (x)))
2496 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2497 x = validize_mem (x);
2500 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2502 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2503 y = validize_mem (y);
2505 if (mode == BLKmode)
2508 last_insn = emit_move_insn_1 (x, y);
2510 if (y_cst && REG_P (x)
2511 && (set = single_set (last_insn)) != NULL_RTX
2512 && SET_DEST (set) == x
2513 && ! rtx_equal_p (y_cst, SET_SRC (set)))
2514 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
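/* Editorial usage sketch, not part of the original source:

       rtx reg = gen_reg_rtx (SImode);
       emit_move_insn (reg, GEN_INT (42));

   If the constant is not LEGITIMATE_CONSTANT_P and must be forced into
   the constant pool, the REG_EQUAL note attached above still records
   that REG holds 42, so later passes such as cse can recover it.  */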
2519 /* Low level part of emit_move_insn.
2520 Called just like emit_move_insn, but assumes X and Y
2521 are basically valid. */
2524 emit_move_insn_1 (rtx x, rtx y)
2526 enum machine_mode mode = GET_MODE (x);
2527 enum machine_mode submode;
2528 enum mode_class class = GET_MODE_CLASS (mode);
2530 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2533 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2535 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2537 /* Expand complex moves by moving real part and imag part, if possible. */
2538 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2539 && BLKmode != (submode = GET_MODE_INNER (mode))
2540 && (mov_optab->handlers[(int) submode].insn_code
2541 != CODE_FOR_nothing))
2543 /* Don't split destination if it is a stack push. */
2544 int stack = push_operand (x, GET_MODE (x));
2546 #ifdef PUSH_ROUNDING
2547 /* In case we output to the stack, but the size is smaller than what the
2548 machine can push exactly, we need to use move instructions. */
2550 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
2551 != GET_MODE_SIZE (submode)))
2554 HOST_WIDE_INT offset1, offset2;
2556 /* Do not use anti_adjust_stack, since we don't want to update
2557 stack_pointer_delta. */
2558 temp = expand_binop (Pmode,
2559 #ifdef STACK_GROWS_DOWNWARD
2567 (GET_MODE_SIZE (GET_MODE (x)))),
2568 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2570 if (temp != stack_pointer_rtx)
2571 emit_move_insn (stack_pointer_rtx, temp);
2573 #ifdef STACK_GROWS_DOWNWARD
2575 offset2 = GET_MODE_SIZE (submode);
2577 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2578 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2579 + GET_MODE_SIZE (submode));
2582 emit_move_insn (change_address (x, submode,
2583 gen_rtx_PLUS (Pmode,
2585 GEN_INT (offset1))),
2586 gen_realpart (submode, y));
2587 emit_move_insn (change_address (x, submode,
2588 gen_rtx_PLUS (Pmode,
2590 GEN_INT (offset2))),
2591 gen_imagpart (submode, y));
2595 /* If this is a stack push, push the highpart first, so it
2596 will be in the argument order.
2598 In that case, change_address is used only to convert
2599 the mode, not to change the address. */
2602 /* Note that the real part always precedes the imag part in memory
2603 regardless of the machine's endianness. */
2604 #ifdef STACK_GROWS_DOWNWARD
2605 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2606 gen_imagpart (submode, y));
2607 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2608 gen_realpart (submode, y));
2610 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2611 gen_realpart (submode, y));
2612 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2613 gen_imagpart (submode, y));
2618 rtx realpart_x, realpart_y;
2619 rtx imagpart_x, imagpart_y;
2621 /* If this is a complex value with each part being smaller than a
2622 word, the usual calling sequence will likely pack the pieces into
2623 a single register. Unfortunately, SUBREG of hard registers only
2624 deals in terms of words, so we have a problem converting input
2625 arguments to the CONCAT of two registers that is used elsewhere
2626 for complex values. If this is before reload, we can copy it into
2627 memory and reload. FIXME, we should see about using extract and
2628 insert on integer registers, but complex short and complex char
2629 variables should be rarely used. */
2630 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2631 && (reload_in_progress | reload_completed) == 0)
2634 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2636 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2638 if (packed_dest_p || packed_src_p)
2640 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2641 ? MODE_FLOAT : MODE_INT);
2643 enum machine_mode reg_mode
2644 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2646 if (reg_mode != BLKmode)
2648 rtx mem = assign_stack_temp (reg_mode,
2649 GET_MODE_SIZE (mode), 0);
2650 rtx cmem = adjust_address (mem, mode, 0);
2654 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2656 emit_move_insn_1 (cmem, y);
2657 return emit_move_insn_1 (sreg, mem);
2661 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2663 emit_move_insn_1 (mem, sreg);
2664 return emit_move_insn_1 (x, cmem);
2670 realpart_x = gen_realpart (submode, x);
2671 realpart_y = gen_realpart (submode, y);
2672 imagpart_x = gen_imagpart (submode, x);
2673 imagpart_y = gen_imagpart (submode, y);
2675 /* Show the output dies here. This is necessary for SUBREGs
2676 of pseudos since we cannot track their lifetimes correctly;
2677 hard regs shouldn't appear here except as return values.
2678 We never want to emit such a clobber after reload. */
2680 && ! (reload_in_progress || reload_completed)
2681 && (GET_CODE (realpart_x) == SUBREG
2682 || GET_CODE (imagpart_x) == SUBREG))
2683 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2685 emit_move_insn (realpart_x, realpart_y);
2686 emit_move_insn (imagpart_x, imagpart_y);
2689 return get_last_insn ();
2692 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
2693 find a mode to do it in. If we have a movcc, use it. Otherwise,
2694 find the MODE_INT mode of the same width. */
2695 else if (GET_MODE_CLASS (mode) == MODE_CC
2696 && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
2698 enum insn_code insn_code;
2699 enum machine_mode tmode = VOIDmode;
2703 && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
2706 for (tmode = QImode; tmode != VOIDmode;
2707 tmode = GET_MODE_WIDER_MODE (tmode))
2708 if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
2711 if (tmode == VOIDmode)
2714 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
2715 may call change_address which is not appropriate if we were
2716 called when a reload was in progress. We don't have to worry
2717 about changing the address since the size in bytes is supposed to
2718 be the same. Copy the MEM to change the mode and move any
2719 substitutions from the old MEM to the new one. */
2721 if (reload_in_progress)
2723 x = gen_lowpart_common (tmode, x1);
2724 if (x == 0 && MEM_P (x1))
2726 x = adjust_address_nv (x1, tmode, 0);
2727 copy_replacements (x1, x);
2730 y = gen_lowpart_common (tmode, y1);
2731 if (y == 0 && MEM_P (y1))
2733 y = adjust_address_nv (y1, tmode, 0);
2734 copy_replacements (y1, y);
2739 x = gen_lowpart (tmode, x);
2740 y = gen_lowpart (tmode, y);
2743 insn_code = mov_optab->handlers[(int) tmode].insn_code;
2744 return emit_insn (GEN_FCN (insn_code) (x, y));
2747 /* Try using a move pattern for the corresponding integer mode. This is
2748 only safe when simplify_subreg can convert MODE constants into integer
2749 constants. At present, it can only do this reliably if the value
2750 fits within a HOST_WIDE_INT. */
2751 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
2752 && (submode = int_mode_for_mode (mode)) != BLKmode
2753 && mov_optab->handlers[submode].insn_code != CODE_FOR_nothing)
2754 return emit_insn (GEN_FCN (mov_optab->handlers[submode].insn_code)
2755 (simplify_gen_subreg (submode, x, mode, 0),
2756 simplify_gen_subreg (submode, y, mode, 0)));
2758 /* This will handle any multi-word or full-word mode that lacks a move_insn
2759 pattern. However, you will get better code if you define such patterns,
2760 even if they must turn into multiple assembler instructions. */
2761 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
2768 #ifdef PUSH_ROUNDING
2770 /* If X is a push on the stack, do the push now and replace
2771 X with a reference to the stack pointer. */
2772 if (push_operand (x, GET_MODE (x)))
2777 /* Do not use anti_adjust_stack, since we don't want to update
2778 stack_pointer_delta. */
2779 temp = expand_binop (Pmode,
2780 #ifdef STACK_GROWS_DOWNWARD
2788 (GET_MODE_SIZE (GET_MODE (x)))),
2789 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2791 if (temp != stack_pointer_rtx)
2792 emit_move_insn (stack_pointer_rtx, temp);
2794 code = GET_CODE (XEXP (x, 0));
2796 /* Just hope that small offsets off SP are OK. */
2797 if (code == POST_INC)
2798 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
2799 GEN_INT (-((HOST_WIDE_INT)
2800 GET_MODE_SIZE (GET_MODE (x)))));
2801 else if (code == POST_DEC)
2802 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
2803 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2805 temp = stack_pointer_rtx;
2807 x = change_address (x, VOIDmode, temp);
2811 /* If we are in reload, see if either operand is a MEM whose address
2812 is scheduled for replacement. */
2813 if (reload_in_progress && MEM_P (x)
2814 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
2815 x = replace_equiv_address_nv (x, inner);
2816 if (reload_in_progress && MEM_P (y)
2817 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
2818 y = replace_equiv_address_nv (y, inner);
2824 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2827 rtx xpart = operand_subword (x, i, 1, mode);
2828 rtx ypart = operand_subword (y, i, 1, mode);
2830 /* If we can't get a part of Y, put Y into memory if it is a
2831 constant. Otherwise, force it into a register. If we still
2832 can't get a part of Y, abort. */
2833 if (ypart == 0 && CONSTANT_P (y))
2835 y = force_const_mem (mode, y);
2836 ypart = operand_subword (y, i, 1, mode);
2838 else if (ypart == 0)
2839 ypart = operand_subword_force (y, i, mode);
2841 if (xpart == 0 || ypart == 0)
2844 need_clobber |= (GET_CODE (xpart) == SUBREG);
2846 last_insn = emit_move_insn (xpart, ypart);
2852 /* Show the output dies here. This is necessary for SUBREGs
2853 of pseudos since we cannot track their lifetimes correctly;
2854 hard regs shouldn't appear here except as return values.
2855 We never want to emit such a clobber after reload. */
2857 && ! (reload_in_progress || reload_completed)
2858 && need_clobber != 0)
2859 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
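/* Editorial worked example, not part of the original source: on a 32-bit
   target with no DImode move pattern, (set (reg:DI 100) (reg:DI 101))
   reaches the multi-word arm above and is decomposed by operand_subword
   into two SImode moves, one per word.  Since the word-sized pieces of
   a DImode pseudo are SUBREGs, need_clobber is set and a CLOBBER of the
   whole destination is emitted ahead of the word moves so that flow
   sees the full register die here.  */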
2869 /* If Y is representable exactly in a narrower mode, and the target can
2870 perform the extension directly from constant or memory, then emit the
2871 move as an extension. */
2874 compress_float_constant (rtx x, rtx y)
2876 enum machine_mode dstmode = GET_MODE (x);
2877 enum machine_mode orig_srcmode = GET_MODE (y);
2878 enum machine_mode srcmode;
2881 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
2883 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
2884 srcmode != orig_srcmode;
2885 srcmode = GET_MODE_WIDER_MODE (srcmode))
2888 rtx trunc_y, last_insn;
2890 /* Skip if the target can't extend this way. */
2891 ic = can_extend_p (dstmode, srcmode, 0);
2892 if (ic == CODE_FOR_nothing)
2895 /* Skip if the narrowed value isn't exact. */
2896 if (! exact_real_truncate (srcmode, &r))
2899 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
2901 if (LEGITIMATE_CONSTANT_P (trunc_y))
2903 /* Skip if the target needs extra instructions to perform the extension. */
2905 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
2908 else if (float_extend_from_mem[dstmode][srcmode])
2909 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
2913 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
2914 last_insn = get_last_insn ();
2917 set_unique_reg_note (last_insn, REG_EQUAL, y);
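/* Editorial worked example, not part of the original source: moving the
   DFmode constant 1.5 into a register can go through SFmode, because
   1.5 truncates exactly (exact_real_truncate succeeds), so the move
   becomes a float_extend of an SFmode constant or constant-pool entry
   and the pool entry shrinks by half.  A constant such as 0.1, which
   has no exact SFmode representation, fails the truncation test and is
   stored at full width.  */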
2925 /* Pushing data onto the stack. */
2927 /* Push a block of length SIZE (perhaps variable)
2928 and return an rtx to address the beginning of the block.
2929 The value may be virtual_outgoing_args_rtx.
2931 EXTRA is the number of bytes of padding to push in addition to SIZE.
2932 BELOW nonzero means this padding comes at low addresses;
2933 otherwise, the padding comes at high addresses. */
2936 push_block (rtx size, int extra, int below)
2940 size = convert_modes (Pmode, ptr_mode, size, 1);
2941 if (CONSTANT_P (size))
2942 anti_adjust_stack (plus_constant (size, extra));
2943 else if (REG_P (size) && extra == 0)
2944 anti_adjust_stack (size);
2947 temp = copy_to_mode_reg (Pmode, size);
2949 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2950 temp, 0, OPTAB_LIB_WIDEN);
2951 anti_adjust_stack (temp);
2954 #ifndef STACK_GROWS_DOWNWARD
2960 temp = virtual_outgoing_args_rtx;
2961 if (extra != 0 && below)
2962 temp = plus_constant (temp, extra);
2966 if (GET_CODE (size) == CONST_INT)
2967 temp = plus_constant (virtual_outgoing_args_rtx,
2968 -INTVAL (size) - (below ? 0 : extra));
2969 else if (extra != 0 && !below)
2970 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2971 negate_rtx (Pmode, plus_constant (size, extra)));
2973 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2974 negate_rtx (Pmode, size));
2977 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
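/* Editorial worked example, not part of the original source: on a
   downward-growing stack with constant SIZE == 16 and EXTRA == 4,
   anti_adjust_stack reserves 20 bytes.  With BELOW == 0 the function
   returns virtual_outgoing_args_rtx - 20, putting the 4 padding bytes
   above the block; with BELOW nonzero it returns
   virtual_outgoing_args_rtx - 16, putting the padding below it.  */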
2980 #ifdef PUSH_ROUNDING
2982 /* Emit a single push insn. */
2985 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
2988 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
2990 enum insn_code icode;
2991 insn_operand_predicate_fn pred;
2993 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
2994 /* If there is a push pattern, use it. Otherwise try the old way of throwing
2995 a MEM representing the push operation to the move expander. */
2996 icode = push_optab->handlers[(int) mode].insn_code;
2997 if (icode != CODE_FOR_nothing)
2999 if (((pred = insn_data[(int) icode].operand[0].predicate)
3000 && !((*pred) (x, mode))))
3001 x = force_reg (mode, x);
3002 emit_insn (GEN_FCN (icode) (x));
3005 if (GET_MODE_SIZE (mode) == rounded_size)
3006 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3007 /* If we are to pad downward, adjust the stack pointer first and
3008 then store X into the stack location using an offset. This is
3009 because emit_move_insn does not know how to pad; it does not have access to type. */
3011 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3013 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3014 HOST_WIDE_INT offset;
3016 emit_move_insn (stack_pointer_rtx,
3017 expand_binop (Pmode,
3018 #ifdef STACK_GROWS_DOWNWARD
3024 GEN_INT (rounded_size),
3025 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3027 offset = (HOST_WIDE_INT) padding_size;
3028 #ifdef STACK_GROWS_DOWNWARD
3029 if (STACK_PUSH_CODE == POST_DEC)
3030 /* We have already decremented the stack pointer, so get the previous value. */
3032 offset += (HOST_WIDE_INT) rounded_size;
3034 if (STACK_PUSH_CODE == POST_INC)
3035 /* We have already incremented the stack pointer, so get the previous value. */
3037 offset -= (HOST_WIDE_INT) rounded_size;
3039 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3043 #ifdef STACK_GROWS_DOWNWARD
3044 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3045 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3046 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3048 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3049 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3050 GEN_INT (rounded_size));
3052 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3055 dest = gen_rtx_MEM (mode, dest_addr);
3059 set_mem_attributes (dest, type, 1);
3061 if (flag_optimize_sibling_calls)
3062 /* Function incoming arguments may overlap with sibling call
3063 outgoing arguments and we cannot allow reordering of reads
3064 from function arguments with stores to outgoing arguments
3065 of sibling calls. */
3066 set_mem_alias_set (dest, 0);
3068 emit_move_insn (dest, x);
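/* Editorial worked example, not part of the original source: pushing an
   HImode argument on a target where PUSH_ROUNDING rounds 2 bytes up to
   4 and FUNCTION_ARG_PADDING says downward: the stack pointer is first
   moved by the full rounded 4 bytes, padding_size is 2, and (with the
   usual PRE_DEC push convention) the 2-byte value is stored at sp + 2,
   leaving the padding at the lower addresses of the slot.  */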
3072 /* Generate code to push X onto the stack, assuming it has mode MODE and
3074 type TYPE. MODE is redundant except when X is a CONST_INT (since they don't
3076 carry mode info). SIZE is an rtx for the size of data to be copied (in bytes),
3077 needed only if X is BLKmode.
3079 ALIGN (in bits) is the maximum alignment we can assume.
3081 If PARTIAL and REG are both nonzero, then copy that many of the first
3082 words of X into registers starting with REG, and push the rest of X.
3083 The amount of space pushed is decreased by PARTIAL words,
3084 rounded *down* to a multiple of PARM_BOUNDARY.
3085 REG must be a hard register in this case.
3086 If REG is zero but PARTIAL is not, take all other actions for an
3087 argument partially in registers, but do not actually load any registers.
3090 EXTRA is the amount in bytes of extra space to leave next to this arg.
3091 This is ignored if an argument block has already been allocated.
3093 On a machine that lacks real push insns, ARGS_ADDR is the address of
3094 the bottom of the argument block for this call. We use indexing off there
3095 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3096 argument block has not been preallocated.
3098 ARGS_SO_FAR is the size of args previously pushed for this call.
3100 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3101 for arguments passed in registers. If nonzero, it will be the number
3102 of bytes required. */
3105 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3106 unsigned int align, int partial, rtx reg, int extra,
3107 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3111 enum direction stack_direction
3112 #ifdef STACK_GROWS_DOWNWARD
3118 /* Decide where to pad the argument: `downward' for below,
3119 `upward' for above, or `none' for don't pad it.
3120 Default is below for small data on big-endian machines; else above. */
3121 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3123 /* Invert direction if stack is post-decrement.
3125 if (STACK_PUSH_CODE == POST_DEC)
3126 if (where_pad != none)
3127 where_pad = (where_pad == downward ? upward : downward);
3131 if (mode == BLKmode)
3133 /* Copy a block into the stack, entirely or partially. */
3136 int used = partial * UNITS_PER_WORD;
3140 if (reg && GET_CODE (reg) == PARALLEL)
3142 /* Use the size of the elt to compute offset. */
3143 rtx elt = XEXP (XVECEXP (reg, 0, 0), 0);
3144 used = partial * GET_MODE_SIZE (GET_MODE (elt));
3145 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3148 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3155 /* USED is now the # of bytes we need not copy to the stack
3156 because registers will take care of them. */
3159 xinner = adjust_address (xinner, BLKmode, used);
3161 /* If the partial register-part of the arg counts in its stack size,
3162 skip the part of stack space corresponding to the registers.
3163 Otherwise, start copying to the beginning of the stack space,
3164 by setting SKIP to 0. */
3165 skip = (reg_parm_stack_space == 0) ? 0 : used;
3167 #ifdef PUSH_ROUNDING
3168 /* Do it with several push insns if that doesn't take lots of insns
3169 and if there is no difficulty with push insns that skip bytes
3170 on the stack for alignment purposes. */
3173 && GET_CODE (size) == CONST_INT
3175 && MEM_ALIGN (xinner) >= align
3176 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3177 /* Here we avoid the case of a structure whose weak alignment
3178 forces many pushes of a small amount of data,
3179 and such small pushes do rounding that causes trouble. */
3180 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3181 || align >= BIGGEST_ALIGNMENT
3182 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3183 == (align / BITS_PER_UNIT)))
3184 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3186 /* Push padding now if padding above and stack grows down,
3187 or if padding below and stack grows up.
3188 But if space already allocated, this has already been done. */
3189 if (extra && args_addr == 0
3190 && where_pad != none && where_pad != stack_direction)
3191 anti_adjust_stack (GEN_INT (extra));
3193 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3196 #endif /* PUSH_ROUNDING */
3200 /* Otherwise make space on the stack and copy the data
3201 to the address of that space. */
3203 /* Deduct words put into registers from the size we must copy. */
3206 if (GET_CODE (size) == CONST_INT)
3207 size = GEN_INT (INTVAL (size) - used);
3209 size = expand_binop (GET_MODE (size), sub_optab, size,
3210 GEN_INT (used), NULL_RTX, 0,
3214 /* Get the address of the stack space.
3215 In this case, we do not deal with EXTRA separately.
3216 A single stack adjust will do. */
3219 temp = push_block (size, extra, where_pad == downward);
3222 else if (GET_CODE (args_so_far) == CONST_INT)
3223 temp = memory_address (BLKmode,
3224 plus_constant (args_addr,
3225 skip + INTVAL (args_so_far)));
3227 temp = memory_address (BLKmode,
3228 plus_constant (gen_rtx_PLUS (Pmode,
3233 if (!ACCUMULATE_OUTGOING_ARGS)
3235 /* If the source is referenced relative to the stack pointer,
3236 copy it to another register to stabilize it. We do not need
3237 to do this if we know that we won't be changing sp. */
3239 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3240 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3241 temp = copy_to_reg (temp);
3244 target = gen_rtx_MEM (BLKmode, temp);
3246 /* We do *not* set_mem_attributes here, because incoming arguments
3247 may overlap with sibling call outgoing arguments and we cannot
3248 allow reordering of reads from function arguments with stores
3249 to outgoing arguments of sibling calls. We do, however, want
3250 to record the alignment of the stack slot. */
3251 /* ALIGN may well be better aligned than TYPE, e.g. due to
3252 PARM_BOUNDARY. Assume the caller isn't lying. */
3253 set_mem_align (target, align);
3255 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3258 else if (partial > 0)
3260 /* Scalar partly in registers. */
3262 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3265 /* # words of start of argument
3266 that we must make space for but need not store. */
3267 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3268 int args_offset = INTVAL (args_so_far);
3271 /* Push padding now if padding above and stack grows down,
3272 or if padding below and stack grows up.
3273 But if space already allocated, this has already been done. */
3274 if (extra && args_addr == 0
3275 && where_pad != none && where_pad != stack_direction)
3276 anti_adjust_stack (GEN_INT (extra));
3278 /* If we make space by pushing it, we might as well push
3279 the real data. Otherwise, we can leave OFFSET nonzero
3280 and leave the space uninitialized. */
3284 /* Now NOT_STACK gets the number of words that we don't need to
3285 allocate on the stack. */
3286 not_stack = partial - offset;
3288 /* If the partial register-part of the arg counts in its stack size,
3289 skip the part of stack space corresponding to the registers.
3290 Otherwise, start copying to the beginning of the stack space,
3291 by setting SKIP to 0. */
3292 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3294 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3295 x = validize_mem (force_const_mem (mode, x));
3297 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3298 SUBREGs of such registers are not allowed. */
3299 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3300 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3301 x = copy_to_reg (x);
3303 /* Loop over all the words allocated on the stack for this arg. */
3304 /* We can do it by words, because any scalar bigger than a word
3305 has a size that is a multiple of a word. */
3306 #ifndef PUSH_ARGS_REVERSED
3307 for (i = not_stack; i < size; i++)
3309 for (i = size - 1; i >= not_stack; i--)
3311 if (i >= not_stack + offset)
3312 emit_push_insn (operand_subword_force (x, i, mode),
3313 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3315 GEN_INT (args_offset + ((i - not_stack + skip)
3317 reg_parm_stack_space, alignment_pad);
3324 /* Push padding now if padding above and stack grows down,
3325 or if padding below and stack grows up.
3326 But if space already allocated, this has already been done. */
3327 if (extra && args_addr == 0
3328 && where_pad != none && where_pad != stack_direction)
3329 anti_adjust_stack (GEN_INT (extra));
3331 #ifdef PUSH_ROUNDING
3332 if (args_addr == 0 && PUSH_ARGS)
3333 emit_single_push_insn (mode, x, type);
3337 if (GET_CODE (args_so_far) == CONST_INT)
3339 = memory_address (mode,
3340 plus_constant (args_addr,
3341 INTVAL (args_so_far)));
3343 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3345 dest = gen_rtx_MEM (mode, addr);
3347 /* We do *not* set_mem_attributes here, because incoming arguments
3348 may overlap with sibling call outgoing arguments and we cannot
3349 allow reordering of reads from function arguments with stores
3350 to outgoing arguments of sibling calls. We do, however, want
3351 to record the alignment of the stack slot. */
3352 /* ALIGN may well be better aligned than TYPE, e.g. due to
3353 PARM_BOUNDARY. Assume the caller isn't lying. */
3354 set_mem_align (dest, align);
3356 emit_move_insn (dest, x);
3360 /* If part should go in registers, copy that part
3361 into the appropriate registers. Do this now, at the end,
3362 since mem-to-mem copies above may do function calls. */
3363 if (partial > 0 && reg != 0)
3365 /* Handle calls that pass values in multiple non-contiguous locations.
3366 The Irix 6 ABI has examples of this. */
3367 if (GET_CODE (reg) == PARALLEL)
3368 emit_group_load (reg, x, type, -1);
3370 move_block_to_reg (REGNO (reg), x, partial, mode);
3373 if (extra && args_addr == 0 && where_pad == stack_direction)
3374 anti_adjust_stack (GEN_INT (extra));
3376 if (alignment_pad && args_addr == 0)
3377 anti_adjust_stack (alignment_pad);
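/* Editorial worked example, not part of the original source: assuming
   PARM_BOUNDARY == BITS_PER_WORD, pushing a two-word scalar with
   PARTIAL == 1 takes the "scalar partly in registers" branch: size is
   2 words, not_stack is 1, the loop pushes only word 1, and the
   move_block_to_reg call at the end loads word 0 into REG.  */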
3380 /* Return X if X can be used as a subtarget in a sequence of arithmetic operations. */
3384 get_subtarget (rtx x)
3387 /* Only registers can be subtargets. */
3389 /* If the register is readonly, it can't be set more than once. */
3390 || RTX_UNCHANGING_P (x)
3391 /* Don't use hard regs to avoid extending their life. */
3392 || REGNO (x) < FIRST_PSEUDO_REGISTER
3393 /* Avoid subtargets inside loops,
3394 since they hide some invariant expressions. */
3395 || preserve_subexpressions_p ())
3399 /* Expand an assignment that stores the value of FROM into TO.
3400 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3401 (If the value is constant, this rtx is a constant.)
3402 Otherwise, the returned value is NULL_RTX. */
3405 expand_assignment (tree to, tree from, int want_value)
3410 /* Don't crash if the lhs of the assignment was erroneous. */
3412 if (TREE_CODE (to) == ERROR_MARK)
3414 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3415 return want_value ? result : NULL_RTX;
3418 /* Assignment of a structure component needs special treatment
3419 if the structure component's rtx is not simply a MEM.
3420 Assignment of an array element at a constant index, and assignment of
3421 an array element in an unaligned packed structure field, have the same problem. */
3424 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3425 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
3426 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
3428 enum machine_mode mode1;
3429 HOST_WIDE_INT bitsize, bitpos;
3437 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3438 &unsignedp, &volatilep);
3440 /* If we are going to use store_bit_field and extract_bit_field,
3441 make sure to_rtx will be safe for multiple use. */
3443 if (mode1 == VOIDmode && want_value)
3444 tem = stabilize_reference (tem);
3446 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3450 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
3452 if (!MEM_P (to_rtx))
3455 #ifdef POINTERS_EXTEND_UNSIGNED
3456 if (GET_MODE (offset_rtx) != Pmode)
3457 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
3459 if (GET_MODE (offset_rtx) != ptr_mode)
3460 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3463 /* A constant address in TO_RTX can have VOIDmode; we must not try
3464 to call force_reg for that case. Avoid that case. */
3466 && GET_MODE (to_rtx) == BLKmode
3467 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3469 && (bitpos % bitsize) == 0
3470 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3471 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3473 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3477 to_rtx = offset_address (to_rtx, offset_rtx,
3478 highest_pow2_factor_for_target (to,
3484 /* If the field is at offset zero, we could have been given the
3485 DECL_RTX of the parent struct. Don't munge it. */
3486 to_rtx = shallow_copy_rtx (to_rtx);
3488 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
3491 /* Deal with volatile and readonly fields. The former is only done
3492 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3493 if (volatilep && MEM_P (to_rtx))
3495 if (to_rtx == orig_to_rtx)
3496 to_rtx = copy_rtx (to_rtx);
3497 MEM_VOLATILE_P (to_rtx) = 1;
3500 if (TREE_CODE (to) == COMPONENT_REF
3501 && TREE_READONLY (TREE_OPERAND (to, 1))
3502 /* We can't assert that a MEM won't be set more than once
3503 if the component is not addressable because another
3504 non-addressable component may be referenced by the same MEM. */
3505 && ! (MEM_P (to_rtx) && ! can_address_p (to)))
3507 if (to_rtx == orig_to_rtx)
3508 to_rtx = copy_rtx (to_rtx);
3509 RTX_UNCHANGING_P (to_rtx) = 1;
3512 if (MEM_P (to_rtx) && ! can_address_p (to))
3514 if (to_rtx == orig_to_rtx)
3515 to_rtx = copy_rtx (to_rtx);
3516 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3519 /* Optimize bitfld op= val in certain cases. */
3520 while (mode1 == VOIDmode && !want_value
3521 && bitsize > 0 && bitsize < BITS_PER_WORD
3522 && GET_MODE_BITSIZE (GET_MODE (to_rtx)) <= BITS_PER_WORD
3523 && !TREE_SIDE_EFFECTS (to)
3524 && !TREE_THIS_VOLATILE (to))
3527 rtx value, str_rtx = to_rtx;
3528 HOST_WIDE_INT bitpos1 = bitpos;
3533 if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE
3534 || TREE_CODE_CLASS (TREE_CODE (src)) != '2')
3537 op0 = TREE_OPERAND (src, 0);
3538 op1 = TREE_OPERAND (src, 1);
3541 if (! operand_equal_p (to, op0, 0))
3544 if (MEM_P (str_rtx))
3546 enum machine_mode mode = GET_MODE (str_rtx);
3547 HOST_WIDE_INT offset1;
3549 if (GET_MODE_BITSIZE (mode) == 0
3550 || GET_MODE_BITSIZE (mode) > BITS_PER_WORD)
3552 mode = get_best_mode (bitsize, bitpos1, MEM_ALIGN (str_rtx),
3554 if (mode == VOIDmode)
3558 bitpos1 %= GET_MODE_BITSIZE (mode);
3559 offset1 = (offset1 - bitpos1) / BITS_PER_UNIT;
3560 str_rtx = adjust_address (str_rtx, mode, offset1);
3562 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
3565 /* If the bit field covers the whole REG/MEM, store_field
3566 will likely generate better code. */
3567 if (bitsize >= GET_MODE_BITSIZE (GET_MODE (str_rtx)))
3570 /* We can't handle fields split across multiple entities. */
3571 if (bitpos1 + bitsize > GET_MODE_BITSIZE (GET_MODE (str_rtx)))
3574 if (BYTES_BIG_ENDIAN)
3575 bitpos1 = GET_MODE_BITSIZE (GET_MODE (str_rtx)) - bitpos1
3578 /* Special case some bitfield op= exp. */
3579 switch (TREE_CODE (src))
3583 /* For now, just optimize the case of the topmost bitfield
3584 where we don't need to do any masking and also
3585 1-bit bitfields where xor can be used.
3586 We might win by one instruction for the other bitfields
3587 too if insv/extv instructions aren't used, so that
3588 can be added later. */
3589 if (bitpos1 + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx))
3590 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
3592 value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), 0);
3593 value = convert_modes (GET_MODE (str_rtx),
3594 TYPE_MODE (TREE_TYPE (op1)), value,
3595 TYPE_UNSIGNED (TREE_TYPE (op1)));
3597 /* We may be accessing data outside the field, which means
3598 we can alias adjacent data. */
3599 if (MEM_P (str_rtx))
3601 str_rtx = shallow_copy_rtx (str_rtx);
3602 set_mem_alias_set (str_rtx, 0);
3603 set_mem_expr (str_rtx, 0);
3606 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
3608 && bitpos1 + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
3610 value = expand_and (GET_MODE (str_rtx), value, const1_rtx,
3614 value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx),
3615 value, build_int_2 (bitpos1, 0),
3617 result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
3618 value, str_rtx, 1, OPTAB_WIDEN);
3619 if (result != str_rtx)
3620 emit_move_insn (str_rtx, result);
3632 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3634 /* Spurious cast for HPUX compiler. */
3635 ? ((enum machine_mode)
3636 TYPE_MODE (TREE_TYPE (to)))
3638 unsignedp, TREE_TYPE (tem), get_alias_set (to));
3640 preserve_temp_slots (result);
3644 /* If the value is meaningful, convert RESULT to the proper mode.
3645 Otherwise, return nothing. */
3646 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3647 TYPE_MODE (TREE_TYPE (from)),
3649 TYPE_UNSIGNED (TREE_TYPE (to)))
3653 /* If the rhs is a function call and its value is not an aggregate,
3654 call the function before we start to compute the lhs.
3655 This is needed for correct code for cases such as
3656 val = setjmp (buf) on machines where reference to val
3657 requires loading up part of an address in a separate insn.
3659 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3660 since it might be a promoted variable where the zero- or sign- extension
3661 needs to be done. Handling this in the normal way is safe because no
3662 computation is done before the call. */
3663 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
3664 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3665 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3666 && REG_P (DECL_RTL (to))))
3671 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3673 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3675 /* Handle calls that return values in multiple non-contiguous locations.
3676 The Irix 6 ABI has examples of this. */
3677 if (GET_CODE (to_rtx) == PARALLEL)
3678 emit_group_load (to_rtx, value, TREE_TYPE (from),
3679 int_size_in_bytes (TREE_TYPE (from)));
3680 else if (GET_MODE (to_rtx) == BLKmode)
3681 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
3684 if (POINTER_TYPE_P (TREE_TYPE (to)))
3685 value = convert_memory_address (GET_MODE (to_rtx), value);
3686 emit_move_insn (to_rtx, value);
3688 preserve_temp_slots (to_rtx);
3691 return want_value ? to_rtx : NULL_RTX;
3694 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3695 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3698 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3700 /* Don't move directly into a return register. */
3701 if (TREE_CODE (to) == RESULT_DECL
3702 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
3707 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3709 if (GET_CODE (to_rtx) == PARALLEL)
3710 emit_group_load (to_rtx, temp, TREE_TYPE (from),
3711 int_size_in_bytes (TREE_TYPE (from)));
3713 emit_move_insn (to_rtx, temp);
3715 preserve_temp_slots (to_rtx);
3718 return want_value ? to_rtx : NULL_RTX;
3721 /* In case we are returning the contents of an object which overlaps
3722 the place the value is being stored, use a safe function when copying
3723 a value through a pointer into a structure value return block. */
3724 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3725 && current_function_returns_struct
3726 && !current_function_returns_pcc_struct)
3731 size = expr_size (from);
3732 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
3734 emit_library_call (memmove_libfunc, LCT_NORMAL,
3735 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3736 XEXP (from_rtx, 0), Pmode,
3737 convert_to_mode (TYPE_MODE (sizetype),
3738 size, TYPE_UNSIGNED (sizetype)),
3739 TYPE_MODE (sizetype));
3741 preserve_temp_slots (to_rtx);
3744 return want_value ? to_rtx : NULL_RTX;
3747 /* Compute FROM and store the value in the rtx we got. */
3750 result = store_expr (from, to_rtx, want_value);
3751 preserve_temp_slots (result);
3754 return want_value ? result : NULL_RTX;
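/* Editorial worked example, not part of the original source: for

       struct S { unsigned n : 28; unsigned top : 4; } s;
       s.top += 3;

   on a 32-bit little-endian target, the bitfield fast path above emits
   a single full-word add of (3 << 28) to the word containing the field:
   because the field is topmost, carries out of bit 31 simply fall off
   the register and no masking is needed.  */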
3757 /* Generate code for computing expression EXP,
3758 and storing the value into TARGET.
3760 If WANT_VALUE & 1 is nonzero, return a copy of the value
3761 not in TARGET, so that we can be sure to use the proper
3762 value in a containing expression even if TARGET has something
3763 else stored in it. If possible, we copy the value through a pseudo
3764 and return that pseudo. Or, if the value is constant, we try to
3765 return the constant. In some cases, we return a pseudo
3766 copied *from* TARGET.
3768 If the mode is BLKmode then we may return TARGET itself.
3769 It turns out that in BLKmode it doesn't cause a problem,
3770 because C has no operators that could combine two different
3771 assignments into the same BLKmode object with different values
3772 with no sequence point. Will other languages need this to be more thorough?
3775 If WANT_VALUE & 1 is 0, we return NULL, to make sure
3776 to catch quickly any cases where the caller uses the value
3777 and fails to set WANT_VALUE.
3779 If WANT_VALUE & 2 is set, this is a store into a call param on the
3780 stack, and block moves may need to be treated specially. */
3783 store_expr (tree exp, rtx target, int want_value)
3786 rtx alt_rtl = NULL_RTX;
3787 int dont_return_target = 0;
3788 int dont_store_target = 0;
3790 if (VOID_TYPE_P (TREE_TYPE (exp)))
3792 /* C++ can generate ?: expressions with a throw expression in one
3793 branch and an rvalue in the other. Here, we resolve attempts to
3794 store the throw expression's nonexistent result. */
3797 expand_expr (exp, const0_rtx, VOIDmode, 0);
3800 if (TREE_CODE (exp) == COMPOUND_EXPR)
3802 /* Perform first part of compound expression, then assign from second part. */
3804 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
3805 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
3806 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3808 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3810 /* For conditional expression, get safe form of the target. Then
3811 test the condition, doing the appropriate assignment on either
3812 side. This avoids the creation of unnecessary temporaries.
3813 For non-BLKmode, it is more efficient not to do this. */
3815 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3817 do_pending_stack_adjust ();
3819 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3820 store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
3821 emit_jump_insn (gen_jump (lab2));
3824 store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
3828 return want_value & 1 ? target : NULL_RTX;
3830 else if ((want_value & 1) != 0
3832 && ! MEM_VOLATILE_P (target)
3833 && GET_MODE (target) != BLKmode)
3834 /* If target is in memory and caller wants value in a register instead,
3835 arrange that. Pass TARGET as target for expand_expr so that,
3836 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3837 We know expand_expr will not use the target in that case.
3838 Don't do this if TARGET is volatile because we are supposed
3839 to write it and then read it. */
3841 temp = expand_expr (exp, target, GET_MODE (target),
3842 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
3843 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3845 /* If TEMP is already in the desired TARGET, only copy it from
3846 memory and don't store it there again. */
3848 || (rtx_equal_p (temp, target)
3849 && ! side_effects_p (temp) && ! side_effects_p (target)))
3850 dont_store_target = 1;
3851 temp = copy_to_reg (temp);
3853 dont_return_target = 1;
3855 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3856 /* If this is a scalar in a register that is stored in a wider mode
3857 than the declared mode, compute the result into its declared mode
3858 and then convert to the wider mode. Our value is the computed expression. */
3861 rtx inner_target = 0;
3863 /* If we don't want a value, we can do the conversion inside EXP,
3864 which will often result in some optimizations. Do the conversion
3865 in two steps: first change the signedness, if needed, then
3866 the extend. But don't do this if the type of EXP is a subtype
3867 of something else since then the conversion might involve
3868 more than just converting modes. */
3869 if ((want_value & 1) == 0
3870 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3871 && TREE_TYPE (TREE_TYPE (exp)) == 0
3872 && (!lang_hooks.reduce_bit_field_operations
3873 || (GET_MODE_PRECISION (GET_MODE (target))
3874 == TYPE_PRECISION (TREE_TYPE (exp)))))
3876 if (TYPE_UNSIGNED (TREE_TYPE (exp))
3877 != SUBREG_PROMOTED_UNSIGNED_P (target))
3879 (lang_hooks.types.signed_or_unsigned_type
3880 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
3882 exp = convert (lang_hooks.types.type_for_mode
3883 (GET_MODE (SUBREG_REG (target)),
3884 SUBREG_PROMOTED_UNSIGNED_P (target)),
3887 inner_target = SUBREG_REG (target);
3890 temp = expand_expr (exp, inner_target, VOIDmode,
3891 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
3893 /* If TEMP is a MEM and we want a result value, make the access
3894 now so it gets done only once. Strictly speaking, this is
3895 only necessary if the MEM is volatile, or if the address
3896 overlaps TARGET. But not performing the load twice also
3897 reduces the amount of rtl we generate and then have to CSE. */
3898 if (MEM_P (temp) && (want_value & 1) != 0)
3899 temp = copy_to_reg (temp);
3901 /* If TEMP is a VOIDmode constant, use convert_modes to make
3902 sure that we properly convert it. */
3903 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3905 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3906 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
3907 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3908 GET_MODE (target), temp,
3909 SUBREG_PROMOTED_UNSIGNED_P (target));
3912 convert_move (SUBREG_REG (target), temp,
3913 SUBREG_PROMOTED_UNSIGNED_P (target));
3915 /* If we promoted a constant, change the mode back down to match
3916 target. Otherwise, the caller might get confused by a result whose
3917 mode is larger than expected. */
3919 if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
3921 if (GET_MODE (temp) != VOIDmode)
3923 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
3924 SUBREG_PROMOTED_VAR_P (temp) = 1;
3925 SUBREG_PROMOTED_UNSIGNED_SET (temp,
3926 SUBREG_PROMOTED_UNSIGNED_P (target));
3929 temp = convert_modes (GET_MODE (target),
3930 GET_MODE (SUBREG_REG (target)),
3931 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
3934 return want_value & 1 ? temp : NULL_RTX;
3938 temp = expand_expr_real (exp, target, GET_MODE (target),
3940 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
3942 /* Return TARGET if it's a specified hardware register.
3943 If TARGET is a volatile mem ref, either return TARGET
3944 or return a reg copied *from* TARGET; ANSI requires this.
3946 Otherwise, if TEMP is not TARGET, return TEMP
3947 if it is constant (for efficiency),
3948 or if we really want the correct value. */
3949 if (!(target && REG_P (target)
3950 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3951 && !(MEM_P (target) && MEM_VOLATILE_P (target))
3952 && ! rtx_equal_p (temp, target)
3953 && (CONSTANT_P (temp) || (want_value & 1) != 0))
3954 dont_return_target = 1;
3957 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3958 the same as that of TARGET, adjust the constant. This is needed, for
3959 example, in case it is a CONST_DOUBLE and we want only a word-sized value. */
3961 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3962 && TREE_CODE (exp) != ERROR_MARK
3963 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3964 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3965 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
3967 /* If value was not generated in the target, store it there.
3968 Convert the value to TARGET's type first if necessary and emit the
3969 pending incrementations that have been queued when expanding EXP.
3970 Note that we cannot emit the whole queue blindly because this will
3971 effectively disable the POST_INC optimization later.
3973 If TEMP and TARGET compare equal according to rtx_equal_p, but
3974 one or both of them are volatile memory refs, we have to distinguish
3976 - expand_expr has used TARGET. In this case, we must not generate
3977 another copy. This can be detected by TARGET being equal according to ==.
3979 - expand_expr has not used TARGET - that means that the source just
3980 happens to have the same RTX form. Since temp will have been created
3981 by expand_expr, it will compare unequal according to == .
3982 We must generate a copy in this case, to reach the correct number
3983 of volatile memory references. */
3985 if ((! rtx_equal_p (temp, target)
3986 || (temp != target && (side_effects_p (temp)
3987 || side_effects_p (target))))
3988 && TREE_CODE (exp) != ERROR_MARK
3989 && ! dont_store_target
3990 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
3991 but TARGET is not a valid memory reference, TEMP will differ
3992 from TARGET although it is really the same location. */
3993 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
3994 /* If there's nothing to copy, don't bother. Don't call expr_size
3995 unless necessary, because some front ends' (C++) expr_size hook
3996 aborts on objects that are not supposed to be bit-copied or bit-initialized. */
3998 && expr_size (exp) != const0_rtx)
4000 if (GET_MODE (temp) != GET_MODE (target)
4001 && GET_MODE (temp) != VOIDmode)
4003 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4004 if (dont_return_target)
4006 /* In this case, we will return TEMP,
4007 so make sure it has the proper mode.
4008 But don't forget to store the value into TARGET. */
4009 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4010 emit_move_insn (target, temp);
4013 convert_move (target, temp, unsignedp);
4016 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4018 /* Handle copying a string constant into an array. The string
4019 constant may be shorter than the array. So copy just the string's
4020 actual length, and clear the rest. First get the size of the data
4021 type of the string, which is actually the size of the target. */
4022 rtx size = expr_size (exp);
4024 if (GET_CODE (size) == CONST_INT
4025 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4026 emit_block_move (target, temp, size,
4028 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4031 /* Compute the size of the data to copy from the string. */
4033 = size_binop (MIN_EXPR,
4034 make_tree (sizetype, size),
4035 size_int (TREE_STRING_LENGTH (exp)));
4037 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4039 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4042 /* Copy that much. */
4043 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4044 TYPE_UNSIGNED (sizetype));
4045 emit_block_move (target, temp, copy_size_rtx,
4047 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4049 /* Figure out how much is left in TARGET that we have to clear.
4050 Do all calculations in ptr_mode. */
4051 if (GET_CODE (copy_size_rtx) == CONST_INT)
4053 size = plus_constant (size, -INTVAL (copy_size_rtx));
4054 target = adjust_address (target, BLKmode,
4055 INTVAL (copy_size_rtx));
4059 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4060 copy_size_rtx, NULL_RTX, 0,
4063 #ifdef POINTERS_EXTEND_UNSIGNED
4064 if (GET_MODE (copy_size_rtx) != Pmode)
4065 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4066 TYPE_UNSIGNED (sizetype));
4069 target = offset_address (target, copy_size_rtx,
4070 highest_pow2_factor (copy_size));
4071 label = gen_label_rtx ();
4072 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4073 GET_MODE (size), 0, label);
4076 if (size != const0_rtx)
4077 clear_storage (target, size);
4083 /* Handle calls that return values in multiple non-contiguous locations.
4084 The Irix 6 ABI has examples of this. */
4085 else if (GET_CODE (target) == PARALLEL)
4086 emit_group_load (target, temp, TREE_TYPE (exp),
4087 int_size_in_bytes (TREE_TYPE (exp)));
4088 else if (GET_MODE (temp) == BLKmode)
4089 emit_block_move (target, temp, expr_size (exp),
4091 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4094 temp = force_operand (temp, target);
4096 emit_move_insn (target, temp);
4100 /* If we don't want a value, return NULL_RTX. */
4101 if ((want_value & 1) == 0)
4104 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4105 ??? The latter test doesn't seem to make sense. */
4106 else if (dont_return_target && !MEM_P (temp))
4109 /* Return TARGET itself if it is a hard register. */
4110 else if ((want_value & 1) != 0
4111 && GET_MODE (target) != BLKmode
4112 && ! (REG_P (target)
4113 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4114 return copy_to_reg (target);
4120 /* Examine CTOR. Discover how many scalar fields are set to nonzero
4121 values and place that count in *P_NZ_ELTS. Discover how many scalar
4122 fields are set to non-constant values and place that count in *P_NC_ELTS. */
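/* Worked example (an added illustration): for the initializer
   { 0, 5, x } of a three-field struct, two scalar fields are
   nonzero (5, and x, which must be assumed nonzero) and one is
   non-constant (x), so *P_NZ_ELTS receives 2 and *P_NC_ELTS
   receives 1.  */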
4125 categorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts,
4126 HOST_WIDE_INT *p_nc_elts)
4128 HOST_WIDE_INT nz_elts, nc_elts;
4134 for (list = CONSTRUCTOR_ELTS (ctor); list; list = TREE_CHAIN (list))
4136 tree value = TREE_VALUE (list);
4137 tree purpose = TREE_PURPOSE (list);
4141 if (TREE_CODE (purpose) == RANGE_EXPR)
4143 tree lo_index = TREE_OPERAND (purpose, 0);
4144 tree hi_index = TREE_OPERAND (purpose, 1);
4146 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4147 mult = (tree_low_cst (hi_index, 1)
4148 - tree_low_cst (lo_index, 1) + 1);
4151 switch (TREE_CODE (value))
4155 HOST_WIDE_INT nz = 0, nc = 0;
4156 categorize_ctor_elements_1 (value, &nz, &nc);
4157 nz_elts += mult * nz;
4158 nc_elts += mult * nc;
4164 if (!initializer_zerop (value))
4168 if (!initializer_zerop (TREE_REALPART (value)))
4170 if (!initializer_zerop (TREE_IMAGPART (value)))
4176 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4177 if (!initializer_zerop (TREE_VALUE (v)))
4184 if (!initializer_constant_valid_p (value, TREE_TYPE (value)))
4190 *p_nz_elts += nz_elts;
4191 *p_nc_elts += nc_elts;
4195 categorize_ctor_elements (tree ctor, HOST_WIDE_INT *p_nz_elts,
4196 HOST_WIDE_INT *p_nc_elts)
4200 categorize_ctor_elements_1 (ctor, p_nz_elts, p_nc_elts);
4203 /* Count the number of scalars in TYPE. Return -1 on overflow or variable size. */
4207 count_type_elements (tree type)
4209 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
4210 switch (TREE_CODE (type))
4214 tree telts = array_type_nelts (type);
4215 if (telts && host_integerp (telts, 1))
4217 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
4218 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type));
4221 else if (max / n > m)
4229 HOST_WIDE_INT n = 0, t;
4232 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4233 if (TREE_CODE (f) == FIELD_DECL)
4235 t = count_type_elements (TREE_TYPE (f));
4245 case QUAL_UNION_TYPE:
4247 /* Ho hum. How in the world do we guess here? Clearly it isn't
4248 right to count the fields. Guess based on the number of words. */
4249 HOST_WIDE_INT n = int_size_in_bytes (type);
4252 return n / UNITS_PER_WORD;
4259 return TYPE_VECTOR_SUBPARTS (type);
4268 case REFERENCE_TYPE:
4282 /* Return 1 if EXP contains mostly (3/4) zeros. */
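/* For instance (an added illustration): the initializer of
   "int a[100] = { 1, 2 };" has 2 nonzero elements out of the 100
   scalars counted by count_type_elements, and 2 < 100 / 4, so the
   constructor is considered mostly zeros; store_constructor can
   then clear the whole array and store only the two nonzero
   entries.  */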
4285 mostly_zeros_p (tree exp)
4287 if (TREE_CODE (exp) == CONSTRUCTOR)
4290 HOST_WIDE_INT nz_elts, nc_elts, elts;
4292 /* If there are no ranges of true bits, it is all zero. */
4293 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4294 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4296 categorize_ctor_elements (exp, &nz_elts, &nc_elts);
4297 elts = count_type_elements (TREE_TYPE (exp));
4299 return nz_elts < elts / 4;
4302 return initializer_zerop (exp);
4305 /* Helper function for store_constructor.
4306 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4307 TYPE is the type of the CONSTRUCTOR, not the element type.
4308 CLEARED is as for store_constructor.
4309 ALIAS_SET is the alias set to use for any stores.
4311 This provides a recursive shortcut back to store_constructor when it isn't
4312 necessary to go through store_field. This is so that we can pass through
4313 the cleared field to let store_constructor know that we may not have to
4314 clear a substructure if the outer structure has already been cleared. */
4317 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4318 HOST_WIDE_INT bitpos, enum machine_mode mode,
4319 tree exp, tree type, int cleared, int alias_set)
4321 if (TREE_CODE (exp) == CONSTRUCTOR
4322 /* We can only call store_constructor recursively if the size and
4323 bit position are on a byte boundary. */
4324 && bitpos % BITS_PER_UNIT == 0
4325 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
4326 /* If we have a nonzero bitpos for a register target, then we just
4327 let store_field do the bitfield handling. This is unlikely to
4328 generate unnecessary clear instructions anyway. */
4329 && (bitpos == 0 || MEM_P (target)))
4333 = adjust_address (target,
4334 GET_MODE (target) == BLKmode
4336 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4337 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4340 /* Update the alias set, if required. */
4341 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
4342 && MEM_ALIAS_SET (target) != 0)
4344 target = copy_rtx (target);
4345 set_mem_alias_set (target, alias_set);
4348 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4351 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4355 /* Store the value of constructor EXP into the rtx TARGET.
4356 TARGET is either a REG or a MEM; we know it cannot conflict, since
4357 safe_from_p has been called.
4358 CLEARED is true if TARGET is known to have been zero'd.
4359 SIZE is the number of bytes of TARGET we are allowed to modify: this
4360 may not be the same as the size of EXP if we are assigning to a field
4361 which has been packed to exclude padding bits. */
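/* A sketch of the common case (added, not in the original source):
   for "struct { int a, b, c; } s = { 1 };" the CONSTRUCTOR names
   fewer fields than the type has, so the RECORD_TYPE arm below
   clears all of TARGET first and then stores only the value for
   field 'a'.  */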
4364 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4366 tree type = TREE_TYPE (exp);
4367 #ifdef WORD_REGISTER_OPERATIONS
4368 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4371 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4372 || TREE_CODE (type) == QUAL_UNION_TYPE)
4376 /* If size is zero or the target is already cleared, do nothing. */
4377 if (size == 0 || cleared)
4379 /* We either clear the aggregate or indicate the value is dead. */
4380 else if ((TREE_CODE (type) == UNION_TYPE
4381 || TREE_CODE (type) == QUAL_UNION_TYPE)
4382 && ! CONSTRUCTOR_ELTS (exp))
4383 /* If the constructor is empty, clear the union. */
4385 clear_storage (target, expr_size (exp));
4389 /* If we are building a static constructor into a register,
4390 set the initial value as zero so we can fold the value into
4391 a constant. But if more than one register is involved,
4392 this probably loses. */
4393 else if (REG_P (target) && TREE_STATIC (exp)
4394 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4396 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4400 /* If the constructor has fewer fields than the structure
4401 or if we are initializing the structure to mostly zeros,
4402 clear the whole structure first. Don't do this if TARGET is a
4403 register whose mode size isn't equal to SIZE since clear_storage
4404 can't handle this case. */
4406 && ((list_length (CONSTRUCTOR_ELTS (exp)) != fields_length (type))
4407 || mostly_zeros_p (exp))
4409 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4412 rtx xtarget = target;
4414 if (readonly_fields_p (type))
4416 xtarget = copy_rtx (xtarget);
4417 RTX_UNCHANGING_P (xtarget) = 1;
4420 clear_storage (xtarget, GEN_INT (size));
4425 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4427 /* Store each element of the constructor into
4428 the corresponding field of TARGET. */
4430 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4432 tree field = TREE_PURPOSE (elt);
4433 tree value = TREE_VALUE (elt);
4434 enum machine_mode mode;
4435 HOST_WIDE_INT bitsize;
4436 HOST_WIDE_INT bitpos = 0;
4438 rtx to_rtx = target;
4440 /* Just ignore missing fields.
4441 We cleared the whole structure, above,
4442 if any fields are missing. */
4446 if (cleared && initializer_zerop (value))
4449 if (host_integerp (DECL_SIZE (field), 1))
4450 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4454 mode = DECL_MODE (field);
4455 if (DECL_BIT_FIELD (field))
4458 offset = DECL_FIELD_OFFSET (field);
4459 if (host_integerp (offset, 0)
4460 && host_integerp (bit_position (field), 0))
4462 bitpos = int_bit_position (field);
4466 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4473 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
4474 make_tree (TREE_TYPE (exp),
4477 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4478 if (!MEM_P (to_rtx))
4481 #ifdef POINTERS_EXTEND_UNSIGNED
4482 if (GET_MODE (offset_rtx) != Pmode)
4483 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4485 if (GET_MODE (offset_rtx) != ptr_mode)
4486 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4489 to_rtx = offset_address (to_rtx, offset_rtx,
4490 highest_pow2_factor (offset));
4493 if (TREE_READONLY (field))
4496 to_rtx = copy_rtx (to_rtx);
4498 RTX_UNCHANGING_P (to_rtx) = 1;
4501 #ifdef WORD_REGISTER_OPERATIONS
4502 /* If this initializes a field that is smaller than a word, at the
4503 start of a word, try to widen it to a full word.
4504 This special case allows us to output C++ member function
4505 initializations in a form that the optimizers can understand. */
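/* A hedged example (added): on a 32-bit WORD_REGISTER_OPERATIONS
   target, initializing a 16-bit integer field that starts on a
   word boundary with the constant 3 becomes a full 32-bit word
   store of 3 (shifted left by 16 first on a big-endian machine)
   instead of a 16-bit bit-field insertion.  */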
4507 && bitsize < BITS_PER_WORD
4508 && bitpos % BITS_PER_WORD == 0
4509 && GET_MODE_CLASS (mode) == MODE_INT
4510 && TREE_CODE (value) == INTEGER_CST
4512 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4514 tree type = TREE_TYPE (value);
4516 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4518 type = lang_hooks.types.type_for_size
4519 (BITS_PER_WORD, TYPE_UNSIGNED (type));
4520 value = convert (type, value);
4523 if (BYTES_BIG_ENDIAN)
4525 = fold (build2 (LSHIFT_EXPR, type, value,
4526 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4527 bitsize = BITS_PER_WORD;
4532 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4533 && DECL_NONADDRESSABLE_P (field))
4535 to_rtx = copy_rtx (to_rtx);
4536 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4539 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4540 value, type, cleared,
4541 get_alias_set (TREE_TYPE (field)));
4545 else if (TREE_CODE (type) == ARRAY_TYPE)
4551 tree elttype = TREE_TYPE (type);
4553 HOST_WIDE_INT minelt = 0;
4554 HOST_WIDE_INT maxelt = 0;
4556 domain = TYPE_DOMAIN (type);
4557 const_bounds_p = (TYPE_MIN_VALUE (domain)
4558 && TYPE_MAX_VALUE (domain)
4559 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4560 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4562 /* If we have constant bounds for the range of the type, get them. */
4565 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4566 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4569 /* If the constructor has fewer elements than the array,
4570 clear the whole array first. Similarly if this is
4571 a static constructor of a non-BLKmode object. */
4574 else if (REG_P (target) && TREE_STATIC (exp))
4578 HOST_WIDE_INT count = 0, zero_count = 0;
4579 need_to_clear = ! const_bounds_p;
4581 /* This loop is a more accurate version of the loop in
4582 mostly_zeros_p (it handles RANGE_EXPR in an index).
4583 It is also needed to check for missing elements. */
4584 for (elt = CONSTRUCTOR_ELTS (exp);
4585 elt != NULL_TREE && ! need_to_clear;
4586 elt = TREE_CHAIN (elt))
4588 tree index = TREE_PURPOSE (elt);
4589 HOST_WIDE_INT this_node_count;
4591 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4593 tree lo_index = TREE_OPERAND (index, 0);
4594 tree hi_index = TREE_OPERAND (index, 1);
4596 if (! host_integerp (lo_index, 1)
4597 || ! host_integerp (hi_index, 1))
4603 this_node_count = (tree_low_cst (hi_index, 1)
4604 - tree_low_cst (lo_index, 1) + 1);
4607 this_node_count = 1;
4609 count += this_node_count;
4610 if (mostly_zeros_p (TREE_VALUE (elt)))
4611 zero_count += this_node_count;
4614 /* Clear the entire array first if there are any missing elements,
4615 or if the incidence of zero elements is >= 75%. */
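/* For example (an added note): "int a[8] = { 9 };" leaves seven
   elements implicit, so count < 8 and the whole array is cleared
   first; an explicit { 9, 0, 0, 0, 0, 0, 0, 0 } has zero_count 7,
   and 4 * 7 >= 3 * 8, so it too is cleared wholesale rather than
   element by element.  */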
4617 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4621 if (need_to_clear && size > 0)
4624 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4626 clear_storage (target, GEN_INT (size));
4630 if (!cleared && REG_P (target))
4631 /* Inform later passes that the old value is dead. */
4632 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4634 /* Store each element of the constructor into
4635 the corresponding element of TARGET, determined
4636 by counting the elements. */
4637 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4639 elt = TREE_CHAIN (elt), i++)
4641 enum machine_mode mode;
4642 HOST_WIDE_INT bitsize;
4643 HOST_WIDE_INT bitpos;
4645 tree value = TREE_VALUE (elt);
4646 tree index = TREE_PURPOSE (elt);
4647 rtx xtarget = target;
4649 if (cleared && initializer_zerop (value))
4652 unsignedp = TYPE_UNSIGNED (elttype);
4653 mode = TYPE_MODE (elttype);
4654 if (mode == BLKmode)
4655 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4656 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4659 bitsize = GET_MODE_BITSIZE (mode);
4661 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4663 tree lo_index = TREE_OPERAND (index, 0);
4664 tree hi_index = TREE_OPERAND (index, 1);
4665 rtx index_r, pos_rtx;
4666 HOST_WIDE_INT lo, hi, count;
4669 /* If the range is constant and "small", unroll the loop. */
4671 && host_integerp (lo_index, 0)
4672 && host_integerp (hi_index, 0)
4673 && (lo = tree_low_cst (lo_index, 0),
4674 hi = tree_low_cst (hi_index, 0),
4675 count = hi - lo + 1,
4678 || (host_integerp (TYPE_SIZE (elttype), 1)
4679 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4682 lo -= minelt; hi -= minelt;
4683 for (; lo <= hi; lo++)
4685 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4688 && !MEM_KEEP_ALIAS_SET_P (target)
4689 && TREE_CODE (type) == ARRAY_TYPE
4690 && TYPE_NONALIASED_COMPONENT (type))
4692 target = copy_rtx (target);
4693 MEM_KEEP_ALIAS_SET_P (target) = 1;
4696 store_constructor_field
4697 (target, bitsize, bitpos, mode, value, type, cleared,
4698 get_alias_set (elttype));
4703 rtx loop_start = gen_label_rtx ();
4704 rtx loop_end = gen_label_rtx ();
4707 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4708 unsignedp = TYPE_UNSIGNED (domain);
4710 index = build_decl (VAR_DECL, NULL_TREE, domain);
4713 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4715 SET_DECL_RTL (index, index_r);
4716 store_expr (lo_index, index_r, 0);
4718 /* Build the head of the loop. */
4719 do_pending_stack_adjust ();
4720 emit_label (loop_start);
4722 /* Assign value to element index. */
4724 = convert (ssizetype,
4725 fold (build2 (MINUS_EXPR, TREE_TYPE (index),
4726 index, TYPE_MIN_VALUE (domain))));
4727 position = size_binop (MULT_EXPR, position,
4729 TYPE_SIZE_UNIT (elttype)));
4731 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4732 xtarget = offset_address (target, pos_rtx,
4733 highest_pow2_factor (position));
4734 xtarget = adjust_address (xtarget, mode, 0);
4735 if (TREE_CODE (value) == CONSTRUCTOR)
4736 store_constructor (value, xtarget, cleared,
4737 bitsize / BITS_PER_UNIT);
4739 store_expr (value, xtarget, 0);
4741 /* Generate a conditional jump to exit the loop. */
4742 exit_cond = build2 (LT_EXPR, integer_type_node,
4744 jumpif (exit_cond, loop_end);
4746 /* Update the loop counter, and jump to the head of the loop. */
4748 expand_assignment (index,
4749 build2 (PLUS_EXPR, TREE_TYPE (index),
4750 index, integer_one_node), 0);
4752 emit_jump (loop_start);
4754 /* Build the end of the loop. */
4755 emit_label (loop_end);
4758 else if ((index != 0 && ! host_integerp (index, 0))
4759 || ! host_integerp (TYPE_SIZE (elttype), 1))
4764 index = ssize_int (1);
4767 index = fold_convert (ssizetype,
4768 fold (build2 (MINUS_EXPR,
4771 TYPE_MIN_VALUE (domain))));
4773 position = size_binop (MULT_EXPR, index,
4775 TYPE_SIZE_UNIT (elttype)));
4776 xtarget = offset_address (target,
4777 expand_expr (position, 0, VOIDmode, 0),
4778 highest_pow2_factor (position));
4779 xtarget = adjust_address (xtarget, mode, 0);
4780 store_expr (value, xtarget, 0);
4785 bitpos = ((tree_low_cst (index, 0) - minelt)
4786 * tree_low_cst (TYPE_SIZE (elttype), 1));
4788 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4790 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
4791 && TREE_CODE (type) == ARRAY_TYPE
4792 && TYPE_NONALIASED_COMPONENT (type))
4794 target = copy_rtx (target);
4795 MEM_KEEP_ALIAS_SET_P (target) = 1;
4797 store_constructor_field (target, bitsize, bitpos, mode, value,
4798 type, cleared, get_alias_set (elttype));
4803 else if (TREE_CODE (type) == VECTOR_TYPE)
4809 tree elttype = TREE_TYPE (type);
4810 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
4811 enum machine_mode eltmode = TYPE_MODE (elttype);
4812 HOST_WIDE_INT bitsize;
4813 HOST_WIDE_INT bitpos;
4817 if (eltmode == BLKmode)
4820 n_elts = TYPE_VECTOR_SUBPARTS (type);
4821 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
4823 enum machine_mode mode = GET_MODE (target);
4825 icode = (int) vec_init_optab->handlers[mode].insn_code;
4826 if (icode != CODE_FOR_nothing)
4830 vector = alloca (n_elts * sizeof (rtx)); /* One rtx per element, not one byte. */
4831 for (i = 0; i < n_elts; i++)
4832 vector [i] = CONST0_RTX (GET_MODE_INNER (mode));
4836 /* If the constructor has fewer elements than the vector,
4837 clear the whole vector first. Similarly if this is
4838 a static constructor of a non-BLKmode object. */
4841 else if (REG_P (target) && TREE_STATIC (exp))
4845 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
4847 for (elt = CONSTRUCTOR_ELTS (exp);
4849 elt = TREE_CHAIN (elt))
4853 int_const_binop (TRUNC_DIV_EXPR,
4854 TYPE_SIZE (TREE_TYPE (TREE_VALUE (elt))),
4855 TYPE_SIZE (elttype), 0), 1);
4857 count += n_elts_here;
4858 if (mostly_zeros_p (TREE_VALUE (elt)))
4859 zero_count += n_elts_here;
4862 /* Clear the entire vector first if there are any missing elements,
4863 or if the incidence of zero elements is >= 75%. */
4864 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
4867 if (need_to_clear && size > 0 && !vector)
4870 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4872 clear_storage (target, GEN_INT (size));
4876 if (!cleared && REG_P (target))
4877 /* Inform later passes that the old value is dead. */
4878 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4880 /* Store each element of the constructor into the corresponding
4881 element of TARGET, determined by counting the elements. */
4882 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4884 elt = TREE_CHAIN (elt), i += bitsize / elt_size)
4886 tree value = TREE_VALUE (elt);
4887 tree index = TREE_PURPOSE (elt);
4888 HOST_WIDE_INT eltpos;
4890 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
4891 if (cleared && initializer_zerop (value))
4895 eltpos = tree_low_cst (index, 1);
4901 /* Vector CONSTRUCTORs should only be built from smaller
4902 vectors in the case of BLKmode vectors. */
4903 if (TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE)
4905 vector[eltpos] = expand_expr (value, NULL_RTX, VOIDmode, 0);
4909 enum machine_mode value_mode =
4910 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
4911 ? TYPE_MODE (TREE_TYPE (value))
4913 bitpos = eltpos * elt_size;
4914 store_constructor_field (target, bitsize, bitpos, value_mode, value,
4915 type, cleared, get_alias_set (elttype));
4920 emit_insn (GEN_FCN (icode) (target,
4921 gen_rtx_PARALLEL (GET_MODE (target),
4922 gen_rtvec_v (n_elts, vector))));
4925 /* Set constructor assignments. */
4926 else if (TREE_CODE (type) == SET_TYPE)
4928 tree elt = CONSTRUCTOR_ELTS (exp);
4929 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4930 tree domain = TYPE_DOMAIN (type);
4931 tree domain_min, domain_max, bitlength;
4933 /* The default implementation strategy is to extract the constant
4934 parts of the constructor, use that to initialize the target,
4935 and then "or" in whatever non-constant ranges we need in addition.
4937 If a large set is all zero or all ones, it is
4938 probably better to set it using memset.
4939 Also, if a large set has just a single range, it may be
4940 better to first clear the whole set (using
4941 memset), and then set the bits we want. */
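/* A hedged illustration (added): in a front end with set types, a
   constant constructor such as the Pascal-style [2, 5] over a
   domain of 0..31 fits in one word, so its bits
   (1 << 2) | (1 << 5) are emitted as a single word store below; a
   large set containing a non-constant range is instead cleared
   and then has the range filled in via setbits_libfunc.  */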
4943 /* Check for all zeros. */
4944 if (elt == NULL_TREE && size > 0)
4947 clear_storage (target, GEN_INT (size));
4951 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4952 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4953 bitlength = size_binop (PLUS_EXPR,
4954 size_diffop (domain_max, domain_min),
4957 nbits = tree_low_cst (bitlength, 1);
4959 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4960 are "complicated" (more than one range), initialize (the
4961 constant parts) by copying from a constant. */
4962 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4963 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4965 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4966 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4967 char *bit_buffer = alloca (nbits);
4968 HOST_WIDE_INT word = 0;
4969 unsigned int bit_pos = 0;
4970 unsigned int ibit = 0;
4971 unsigned int offset = 0; /* In bytes from beginning of set. */
4973 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4976 if (bit_buffer[ibit])
4978 if (BYTES_BIG_ENDIAN)
4979 word |= (1 << (set_word_size - 1 - bit_pos));
4981 word |= 1 << bit_pos;
4985 if (bit_pos >= set_word_size || ibit == nbits)
4987 if (word != 0 || ! cleared)
4989 rtx datum = gen_int_mode (word, mode);
4992 /* The assumption here is that it is safe to use
4993 XEXP if the set is multi-word, but not if
4994 it's single-word. */
4996 to_rtx = adjust_address (target, mode, offset);
4997 else if (offset == 0)
5001 emit_move_insn (to_rtx, datum);
5008 offset += set_word_size / BITS_PER_UNIT;
5013 /* Don't bother clearing storage if the set is all ones. */
5014 if (TREE_CHAIN (elt) != NULL_TREE
5015 || (TREE_PURPOSE (elt) == NULL_TREE
5017 : ( ! host_integerp (TREE_VALUE (elt), 0)
5018 || ! host_integerp (TREE_PURPOSE (elt), 0)
5019 || (tree_low_cst (TREE_VALUE (elt), 0)
5020 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5021 != (HOST_WIDE_INT) nbits))))
5022 clear_storage (target, expr_size (exp));
5024 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5026 /* Start of range of element or NULL. */
5027 tree startbit = TREE_PURPOSE (elt);
5028 /* End of range of element, or element value. */
5029 tree endbit = TREE_VALUE (elt);
5030 HOST_WIDE_INT startb, endb;
5031 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5033 bitlength_rtx = expand_expr (bitlength,
5034 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5036 /* Handle non-range tuple element like [ expr ]. */
5037 if (startbit == NULL_TREE)
5039 startbit = save_expr (endbit);
5043 startbit = convert (sizetype, startbit);
5044 endbit = convert (sizetype, endbit);
5045 if (! integer_zerop (domain_min))
5047 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5048 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5050 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5051 EXPAND_CONST_ADDRESS);
5052 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5053 EXPAND_CONST_ADDRESS);
5059 ((build_qualified_type (lang_hooks.types.type_for_mode
5060 (GET_MODE (target), 0),
5063 emit_move_insn (targetx, target);
5066 else if (MEM_P (target))
5071 /* Optimization: If startbit and endbit are constants divisible
5072 by BITS_PER_UNIT, call memset instead. */
5073 if (TREE_CODE (startbit) == INTEGER_CST
5074 && TREE_CODE (endbit) == INTEGER_CST
5075 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5076 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5078 emit_library_call (memset_libfunc, LCT_NORMAL,
5080 plus_constant (XEXP (targetx, 0),
5081 startb / BITS_PER_UNIT),
5083 constm1_rtx, TYPE_MODE (integer_type_node),
5084 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5085 TYPE_MODE (sizetype));
5088 emit_library_call (setbits_libfunc, LCT_NORMAL,
5089 VOIDmode, 4, XEXP (targetx, 0),
5090 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5091 startbit_rtx, TYPE_MODE (sizetype),
5092 endbit_rtx, TYPE_MODE (sizetype));
5095 emit_move_insn (target, targetx);
5103 /* Store the value of EXP (an expression tree)
5104 into a subfield of TARGET which has mode MODE and occupies
5105 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5106 If MODE is VOIDmode, it means that we are storing into a bit-field.
5108 If VALUE_MODE is VOIDmode, return nothing in particular.
5109 UNSIGNEDP is not used in this case.
5111 Otherwise, return an rtx for the value stored. This rtx
5112 has mode VALUE_MODE if that is convenient to do.
5113 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5115 TYPE is the type of the underlying object,
5117 ALIAS_SET is the alias set for the destination. This value will
5118 (in general) be different from that for TARGET, since TARGET is a
5119 reference to the containing structure. */
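/* An added example (not in the original source): for

     struct { unsigned f : 5; } s;
     s.f = x;

   the assignment reaches store_field with BITSIZE 5, BITPOS 0 and
   MODE VOIDmode, so the bit-field branch below uses
   store_bit_field rather than an ordinary memory store.  */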
5122 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5123 enum machine_mode mode, tree exp, enum machine_mode value_mode,
5124 int unsignedp, tree type, int alias_set)
5126 HOST_WIDE_INT width_mask = 0;
5128 if (TREE_CODE (exp) == ERROR_MARK)
5131 /* If we have nothing to store, do nothing unless the expression has side effects. */
5134 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5135 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5136 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5138 /* If we are storing into an unaligned field of an aligned union that is
5139 in a register, we may have the mode of TARGET being an integer mode but
5140 MODE == BLKmode. In that case, get an aligned object whose size and
5141 alignment are the same as TARGET and store TARGET into it (we can avoid
5142 the store if the field being stored is the entire width of TARGET). Then
5143 call ourselves recursively to store the field into a BLKmode version of
5144 that object. Finally, load from the object into TARGET. This is not
5145 very efficient in general, but should only be slightly more expensive
5146 than the otherwise-required unaligned accesses. Perhaps this can be
5147 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5148 twice, once with emit_move_insn and once via store_field. */
5151 && (REG_P (target) || GET_CODE (target) == SUBREG))
5153 rtx object = assign_temp (type, 0, 1, 1);
5154 rtx blk_object = adjust_address (object, BLKmode, 0);
5156 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5157 emit_move_insn (object, target);
5159 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5162 emit_move_insn (target, object);
5164 /* We want to return the BLKmode version of the data. */
5168 if (GET_CODE (target) == CONCAT)
5170 /* We're storing into a struct containing a single __complex. */
5174 return store_expr (exp, target, value_mode != VOIDmode);
5177 /* If the structure is in a register or if the component
5178 is a bit field, we cannot use addressing to access it.
5179 Use bit-field techniques or SUBREG to store in it. */
5181 if (mode == VOIDmode
5182 || (mode != BLKmode && ! direct_store[(int) mode]
5183 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5184 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5186 || GET_CODE (target) == SUBREG
5187 /* If the field isn't aligned enough to store as an ordinary memref,
5188 store it as a bit field. */
5190 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5191 || bitpos % GET_MODE_ALIGNMENT (mode))
5192 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5193 || (bitpos % BITS_PER_UNIT != 0)))
5194 /* If the RHS and field are a constant size and the size of the
5195 RHS isn't the same size as the bitfield, we must use bitfield operations. */
5198 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5199 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5201 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5203 /* If BITSIZE is narrower than the size of the type of EXP
5204 we will be narrowing TEMP. Normally, what's wanted are the
5205 low-order bits. However, if EXP's type is a record and this is
5206 a big-endian machine, we want the upper BITSIZE bits. */
5207 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5208 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5209 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5210 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5211 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5215 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE. */
5217 if (mode != VOIDmode && mode != BLKmode
5218 && mode != TYPE_MODE (TREE_TYPE (exp)))
5219 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5221 /* If the modes of TARGET and TEMP are both BLKmode, both
5222 must be in memory and BITPOS must be aligned on a byte
5223 boundary. If so, we simply do a block copy. */
5224 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5226 if (!MEM_P (target) || !MEM_P (temp)
5227 || bitpos % BITS_PER_UNIT != 0)
5230 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5231 emit_block_move (target, temp,
5232 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5236 return value_mode == VOIDmode ? const0_rtx : target;
5239 /* Store the value in the bitfield. */
5240 store_bit_field (target, bitsize, bitpos, mode, temp);
5242 if (value_mode != VOIDmode)
5244 /* The caller wants an rtx for the value.
5245 If possible, avoid refetching from the bitfield itself. */
5247 && ! (MEM_P (target) && MEM_VOLATILE_P (target)))
5250 enum machine_mode tmode;
5252 tmode = GET_MODE (temp);
5253 if (tmode == VOIDmode)
5257 return expand_and (tmode, temp,
5258 gen_int_mode (width_mask, tmode),
5261 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5262 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5263 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5266 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5267 NULL_RTX, value_mode, VOIDmode);
5273 rtx addr = XEXP (target, 0);
5274 rtx to_rtx = target;
5276 /* If a value is wanted, it must be the lhs;
5277 so make the address stable for multiple use. */
5279 if (value_mode != VOIDmode && !REG_P (addr)
5280 && ! CONSTANT_ADDRESS_P (addr)
5281 /* A frame-pointer reference is already stable. */
5282 && ! (GET_CODE (addr) == PLUS
5283 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5284 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5285 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5286 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5288 /* Now build a reference to just the desired component. */
5290 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5292 if (to_rtx == target)
5293 to_rtx = copy_rtx (to_rtx);
5295 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5296 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5297 set_mem_alias_set (to_rtx, alias_set);
5299 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5303 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5304 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5305 codes and find the ultimate containing object, which we return.
5307 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5308 bit position, and *PUNSIGNEDP to the signedness of the field.
5309 If the position of the field is variable, we store a tree
5310 giving the variable offset (in units) in *POFFSET.
5311 This offset is in addition to the bit position.
5312 If the position is not variable, we store 0 in *POFFSET.
5314 If any of the extraction expressions is volatile,
5315 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5317 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5318 is a mode that can be used to access the field. In that case, *PBITSIZE is redundant.
5321 If the field describes a variable-sized object, *PMODE is set to
5322 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5323 this case, but the address of the object can be found. */
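/* A worked example (added for illustration): for "s.b" where

     struct { int a; int b; } s;

   and ints are 32 bits, the containing object s is returned with
   *PBITSIZE = 32, *PBITPOS = 32, *POFFSET = 0 and *PMODE the mode
   of int (e.g. SImode), since the field is not a bit-field and its
   position is constant.  */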
5326 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5327 HOST_WIDE_INT *pbitpos, tree *poffset,
5328 enum machine_mode *pmode, int *punsignedp,
5332 enum machine_mode mode = VOIDmode;
5333 tree offset = size_zero_node;
5334 tree bit_offset = bitsize_zero_node;
5337 /* First get the mode, signedness, and size. We do this from just the
5338 outermost expression. */
5339 if (TREE_CODE (exp) == COMPONENT_REF)
5341 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5342 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5343 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5345 *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
5347 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5349 size_tree = TREE_OPERAND (exp, 1);
5350 *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
5354 mode = TYPE_MODE (TREE_TYPE (exp));
5355 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5357 if (mode == BLKmode)
5358 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5360 *pbitsize = GET_MODE_BITSIZE (mode);
5365 if (! host_integerp (size_tree, 1))
5366 mode = BLKmode, *pbitsize = -1;
5368 *pbitsize = tree_low_cst (size_tree, 1);
5371 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5372 and find the ultimate containing object. */
5375 if (TREE_CODE (exp) == BIT_FIELD_REF)
5376 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5377 else if (TREE_CODE (exp) == COMPONENT_REF)
5379 tree field = TREE_OPERAND (exp, 1);
5380 tree this_offset = component_ref_field_offset (exp);
5382 /* If this field hasn't been filled in yet, don't go
5383 past it. This should only happen when folding expressions
5384 made during type construction. */
5385 if (this_offset == 0)
5388 offset = size_binop (PLUS_EXPR, offset, this_offset);
5389 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5390 DECL_FIELD_BIT_OFFSET (field));
5392 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5395 else if (TREE_CODE (exp) == ARRAY_REF
5396 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5398 tree index = TREE_OPERAND (exp, 1);
5399 tree low_bound = array_ref_low_bound (exp);
5400 tree unit_size = array_ref_element_size (exp);
5402 /* We assume all arrays have sizes that are a multiple of a byte.
5403 First subtract the lower bound, if any, in the type of the
5404 index, then convert to sizetype and multiply by the size of the element. */
5406 if (! integer_zerop (low_bound))
5407 index = fold (build2 (MINUS_EXPR, TREE_TYPE (index),
5410 offset = size_binop (PLUS_EXPR, offset,
5411 size_binop (MULT_EXPR,
5412 convert (sizetype, index),
5416 /* We can go inside most conversions: all NON_LVALUE_EXPRs, all normal
5417 conversions that don't change the mode, and all view conversions
5418 except those that need to "step up" the alignment. */
5419 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5420 && ! (TREE_CODE (exp) == VIEW_CONVERT_EXPR
5421 && ! ((TYPE_ALIGN (TREE_TYPE (exp))
5422 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5424 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5425 < BIGGEST_ALIGNMENT)
5426 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5427 || TYPE_ALIGN_OK (TREE_TYPE
5428 (TREE_OPERAND (exp, 0))))))
5429 && ! ((TREE_CODE (exp) == NOP_EXPR
5430 || TREE_CODE (exp) == CONVERT_EXPR)
5431 && (TYPE_MODE (TREE_TYPE (exp))
5432 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5435 /* If any reference in the chain is volatile, the effect is volatile. */
5436 if (TREE_THIS_VOLATILE (exp))
5439 exp = TREE_OPERAND (exp, 0);
5442 /* If OFFSET is constant, see if we can return the whole thing as a
5443 constant bit position. Otherwise, split it up. */
5444 if (host_integerp (offset, 0)
5445 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5447 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5448 && host_integerp (tem, 0))
5449 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5451 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5457 /* Return a tree of sizetype representing the size, in bytes, of the element
5458 of EXP, an ARRAY_REF. */
5461 array_ref_element_size (tree exp)
5463 tree aligned_size = TREE_OPERAND (exp, 3);
5464 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5466 /* If a size was specified in the ARRAY_REF, it's the size measured
5467 in alignment units of the element type. So multiply by that value. */
5469 return size_binop (MULT_EXPR, aligned_size,
5470 size_int (TYPE_ALIGN (elmt_type) / BITS_PER_UNIT));
5472 /* Otherwise, take the size from that of the element type. Substitute
5473 any PLACEHOLDER_EXPR that we have. */
5475 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
5478 /* Return a tree representing the lower bound of the array mentioned in
5479 EXP, an ARRAY_REF. */
5482 array_ref_low_bound (tree exp)
5484 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5486 /* If a lower bound is specified in EXP, use it. */
5487 if (TREE_OPERAND (exp, 2))
5488 return TREE_OPERAND (exp, 2);
5490 /* Otherwise, if there is a domain type and it has a lower bound, use it,
5491 substituting for a PLACEHOLDER_EXPR as needed. */
5492 if (domain_type && TYPE_MIN_VALUE (domain_type))
5493 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
5495 /* Otherwise, return a zero of the appropriate type. */
5496 return fold_convert (TREE_TYPE (TREE_OPERAND (exp, 1)), integer_zero_node);
5499 /* Return a tree representing the upper bound of the array mentioned in
5500 EXP, an ARRAY_REF. */
5503 array_ref_up_bound (tree exp)
5505 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5507 /* If there is a domain type and it has an upper bound, use it, substituting
5508 for a PLACEHOLDER_EXPR as needed. */
5509 if (domain_type && TYPE_MAX_VALUE (domain_type))
5510 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
5512 /* Otherwise fail. */
5516 /* Return a tree representing the offset, in bytes, of the field referenced
5517 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
5520 component_ref_field_offset (tree exp)
5522 tree aligned_offset = TREE_OPERAND (exp, 2);
5523 tree field = TREE_OPERAND (exp, 1);
5525 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
5526 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that value. */
5529 return size_binop (MULT_EXPR, aligned_offset,
5530 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
5532 /* Otherwise, take the offset from that of the field. Substitute
5533 any PLACEHOLDER_EXPR that we have. */
5535 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
5538 /* Return 1 if T is an expression that get_inner_reference handles. */
5541 handled_component_p (tree t)
5543 switch (TREE_CODE (t))
5548 case ARRAY_RANGE_REF:
5549 case NON_LVALUE_EXPR:
5550 case VIEW_CONVERT_EXPR:
5553 /* ??? Sure they are handled, but get_inner_reference may return
5554 a different PBITSIZE, depending upon whether the expression is
5555 wrapped up in a NOP_EXPR or not, e.g. for bitfields. */
5558 return (TYPE_MODE (TREE_TYPE (t))
5559 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5566 /* Given an rtx VALUE that may contain additions and multiplications, return
5567 an equivalent value that just refers to a register, memory, or constant.
5568 This is done by generating instructions to perform the arithmetic and
5569 returning a pseudo-register containing the value.
5571 The returned value may be a REG, SUBREG, MEM or constant. */
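/* A sketch of typical behavior (added): given VALUE of the form
   (plus (reg X) (const_int 4)), force_operand emits the addition
   via expand_simple_binop and returns a pseudo register holding
   X + 4; a VALUE that is already a REG, MEM or constant is simply
   returned.  */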
5574 force_operand (rtx value, rtx target)
5577 /* Use subtarget as the target for operand 0 of a binary operation. */
5578 rtx subtarget = get_subtarget (target);
5579 enum rtx_code code = GET_CODE (value);
5581 /* Check for subreg applied to an expression produced by loop optimizer. */
5583 && !REG_P (SUBREG_REG (value))
5584 && !MEM_P (SUBREG_REG (value)))
5586 value = simplify_gen_subreg (GET_MODE (value),
5587 force_reg (GET_MODE (SUBREG_REG (value)),
5588 force_operand (SUBREG_REG (value),
5590 GET_MODE (SUBREG_REG (value)),
5591 SUBREG_BYTE (value));
5592 code = GET_CODE (value);
5595 /* Check for a PIC address load. */
5596 if ((code == PLUS || code == MINUS)
5597 && XEXP (value, 0) == pic_offset_table_rtx
5598 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5599 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5600 || GET_CODE (XEXP (value, 1)) == CONST))
5603 subtarget = gen_reg_rtx (GET_MODE (value));
5604 emit_move_insn (subtarget, value);
5608 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5611 target = gen_reg_rtx (GET_MODE (value));
5612 convert_move (target, force_operand (XEXP (value, 0), NULL),
5613 code == ZERO_EXTEND);
5617 if (ARITHMETIC_P (value))
5619 op2 = XEXP (value, 1);
5620 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
5622 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5625 op2 = negate_rtx (GET_MODE (value), op2);
5628 /* Check for an addition with OP2 a constant integer and our first
5629 operand a PLUS of a virtual register and something else. In that
5630 case, we want to emit the sum of the virtual register and the
5631 constant first and then add the other value. This allows virtual
5632 register instantiation to simply modify the constant rather than
5633 creating another one around this addition. */
5634 if (code == PLUS && GET_CODE (op2) == CONST_INT
5635 && GET_CODE (XEXP (value, 0)) == PLUS
5636 && REG_P (XEXP (XEXP (value, 0), 0))
5637 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5638 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5640 rtx temp = expand_simple_binop (GET_MODE (value), code,
5641 XEXP (XEXP (value, 0), 0), op2,
5642 subtarget, 0, OPTAB_LIB_WIDEN);
5643 return expand_simple_binop (GET_MODE (value), code, temp,
5644 force_operand (XEXP (XEXP (value,
5646 target, 0, OPTAB_LIB_WIDEN);
5649 op1 = force_operand (XEXP (value, 0), subtarget);
5650 op2 = force_operand (op2, NULL_RTX);
5654 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5656 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5657 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5658 target, 1, OPTAB_LIB_WIDEN);
5660 return expand_divmod (0,
5661 FLOAT_MODE_P (GET_MODE (value))
5662 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5663 GET_MODE (value), op1, op2, target, 0);
5666 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5670 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5674 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5678 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5679 target, 0, OPTAB_LIB_WIDEN);
5682 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5683 target, 1, OPTAB_LIB_WIDEN);
5686 if (UNARY_P (value))
5688 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5689 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5692 #ifdef INSN_SCHEDULING
5693 /* On machines that have insn scheduling, we want all memory references to be
5694 explicit, so we need to deal with such paradoxical SUBREGs. */
5695 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
5696 && (GET_MODE_SIZE (GET_MODE (value))
5697 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5699 = simplify_gen_subreg (GET_MODE (value),
5700 force_reg (GET_MODE (SUBREG_REG (value)),
5701 force_operand (SUBREG_REG (value),
5703 GET_MODE (SUBREG_REG (value)),
5704 SUBREG_BYTE (value));
5710 /* Subroutine of expand_expr: return nonzero iff there is no way that
5711 EXP can reference X, which is being modified. TOP_P is nonzero if this
5712 call is going to be used to determine whether we need a temporary
5713 for EXP, as opposed to a recursive call to this function.
5715 It is always safe for this routine to return zero since it merely
5716 searches for optimization opportunities. */
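/* An added illustration: when expanding "a = b + f (a)", letting
   the right-hand side be computed directly into a's location
   would allow the call to clobber 'a' before it is read; the
   CALL_EXPR case below conservatively returns 0 for any X in
   memory or in a hard register, so the caller evaluates into a
   temporary instead.  */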
5719 safe_from_p (rtx x, tree exp, int top_p)
5725 /* If EXP has varying size, we MUST use a target since we currently
5726 have no way of allocating temporaries of variable size
5727 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5728 So we assume here that something at a higher level has prevented a
5729 clash. This is somewhat bogus, but the best we can do. Only
5730 do this when X is BLKmode and when we are at the top level. */
5731 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5732 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5733 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5734 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5735 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5737 && GET_MODE (x) == BLKmode)
5738 /* If X is in the outgoing argument area, it is always safe. */
5740 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5741 || (GET_CODE (XEXP (x, 0)) == PLUS
5742 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5745 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5746 find the underlying pseudo. */
5747 if (GET_CODE (x) == SUBREG)
5750 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
5754 /* Now look at our tree code and possibly recurse. */
5755 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5758 exp_rtl = DECL_RTL_IF_SET (exp);
5765 if (TREE_CODE (exp) == TREE_LIST)
5769 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
5771 exp = TREE_CHAIN (exp);
5774 if (TREE_CODE (exp) != TREE_LIST)
5775 return safe_from_p (x, exp, 0);
5778 else if (TREE_CODE (exp) == ERROR_MARK)
5779 return 1; /* An already-visited SAVE_EXPR? */
5784 /* The only case we look at here is the DECL_INITIAL inside a DECL_EXPR. */
5786 return (TREE_CODE (exp) != DECL_EXPR
5787 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
5788 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
5789 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
5793 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
5798 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5802 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5803 the expression. If it is set, we conflict iff we are that rtx or
5804 both are in memory. Otherwise, we check all operands of the
5805 expression recursively. */
5807 switch (TREE_CODE (exp))
5810 /* If the operand is static or we are static, we can't conflict.
5811 Likewise if we don't conflict with the operand at all. */
5812 if (staticp (TREE_OPERAND (exp, 0))
5813 || TREE_STATIC (exp)
5814 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5817 /* Otherwise, the only way this can conflict is if we are taking
5818 the address of a DECL whose address is part of X, which is very rare. */
5820 exp = TREE_OPERAND (exp, 0);
5823 if (!DECL_RTL_SET_P (exp)
5824 || !MEM_P (DECL_RTL (exp)))
5827 exp_rtl = XEXP (DECL_RTL (exp), 0);
5833 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5834 get_alias_set (exp)))
5839 /* Assume that the call will clobber all hard registers and all of memory. */
5841 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
5846 case WITH_CLEANUP_EXPR:
5847 case CLEANUP_POINT_EXPR:
5848 /* Lowered by gimplify.c. */
5852 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5858 /* If we have an rtx, we do not need to scan our operands. */
5862 nops = first_rtl_op (TREE_CODE (exp));
5863 for (i = 0; i < nops; i++)
5864 if (TREE_OPERAND (exp, i) != 0
5865 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5868 /* If this is a language-specific tree code, it may require
5869 special handling. */
5870 if ((unsigned int) TREE_CODE (exp)
5871 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5872 && !lang_hooks.safe_from_p (x, exp))
5876 /* If we have an rtl, find any enclosed object. Then see if we conflict with it. */
5880 if (GET_CODE (exp_rtl) == SUBREG)
5882 exp_rtl = SUBREG_REG (exp_rtl);
5884 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5888 /* If the rtl is X, then it is not safe. Otherwise, it is safe unless
5889 both are memory references and they conflict. */
5890 return ! (rtx_equal_p (x, exp_rtl)
5891 || (MEM_P (x) && MEM_P (exp_rtl)
5892 && true_dependence (exp_rtl, VOIDmode, x,
5893 rtx_addr_varies_p)));
5896 /* If we reach here, it is safe. */
5901 /* Return the highest power of two that EXP is known to be a multiple of.
5902 This is used in updating alignment of MEMs in array references. */
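/* Added examples (assuming the elided default case returns 1 for
   a plain variable): highest_pow2_factor returns 8 for the
   constant 24 (its lowest set bit), 4 for "i * 12" (1 for the
   variable index times 4 for the constant factor), and
   BIGGEST_ALIGNMENT for the constant 0.  */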
5904 static unsigned HOST_WIDE_INT
5905 highest_pow2_factor (tree exp)
5907 unsigned HOST_WIDE_INT c0, c1;
5909 switch (TREE_CODE (exp))
5912 /* We can find the lowest bit that's a one. If the low
5913 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
5914 We need to handle this case since we can find it in a COND_EXPR,
5915 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
5916 erroneous program, so return BIGGEST_ALIGNMENT to avoid any later ICE. */
5918 if (TREE_CONSTANT_OVERFLOW (exp))
5919 return BIGGEST_ALIGNMENT;
5922 /* Note: tree_low_cst is intentionally not used here,
5923 we don't care about the upper bits. */
5924 c0 = TREE_INT_CST_LOW (exp);
5926 return c0 ? c0 : BIGGEST_ALIGNMENT;
5930 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
5931 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5932 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5933 return MIN (c0, c1);
5936 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5937 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5940 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
5942 if (integer_pow2p (TREE_OPERAND (exp, 1))
5943 && host_integerp (TREE_OPERAND (exp, 1), 1))
5945 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5946 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
5947 return MAX (1, c0 / c1);
5951 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
5953 return highest_pow2_factor (TREE_OPERAND (exp, 0));
5956 return highest_pow2_factor (TREE_OPERAND (exp, 1));
5959 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5960 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
5961 return MIN (c0, c1);
5970 /* Similar, except that the alignment requirements of TARGET are
5971 taken into account. Assume it is at least as aligned as its
5972 type, unless it is a COMPONENT_REF in which case the layout of
5973 the structure gives the alignment. */
5975 static unsigned HOST_WIDE_INT
5976 highest_pow2_factor_for_target (tree target, tree exp)
5978 unsigned HOST_WIDE_INT target_align, factor;
5980 factor = highest_pow2_factor (exp);
5981 if (TREE_CODE (target) == COMPONENT_REF)
5982 target_align = DECL_ALIGN (TREE_OPERAND (target, 1)) / BITS_PER_UNIT;
5984 target_align = TYPE_ALIGN (TREE_TYPE (target)) / BITS_PER_UNIT;
5985 return MAX (factor, target_align);
5988 /* Expands variable VAR. */
5991 expand_var (tree var)
5993 if (DECL_EXTERNAL (var))
5996 if (TREE_STATIC (var))
5997 /* If this is an inlined copy of a static local variable,
5998 look up the original decl. */
5999 var = DECL_ORIGIN (var);
6001 if (TREE_STATIC (var)
6002 ? !TREE_ASM_WRITTEN (var)
6003 : !DECL_RTL_SET_P (var))
6005 if (TREE_CODE (var) == VAR_DECL && DECL_DEFER_OUTPUT (var))
6007 /* Prepare a mem & address for the decl. */
6010 if (TREE_STATIC (var))
6013 x = gen_rtx_MEM (DECL_MODE (var),
6014 gen_reg_rtx (Pmode));
6016 set_mem_attributes (x, var, 1);
6017 SET_DECL_RTL (var, x);
6019 else if (lang_hooks.expand_decl (var))
6021 else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
6023 else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
6024 rest_of_decl_compilation (var, 0, 0);
6025 else if (TREE_CODE (var) == TYPE_DECL
6026 || TREE_CODE (var) == CONST_DECL
6027 || TREE_CODE (var) == FUNCTION_DECL
6028 || TREE_CODE (var) == LABEL_DECL)
6029 /* No expansion needed. */;
6035 /* Subroutine of expand_expr. Expand the two operands of a binary
6036 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6037 The value may be stored in TARGET if TARGET is nonzero. The
6038 MODIFIER argument is as documented by expand_expr. */
6041 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6042 enum expand_modifier modifier)
6044 if (! safe_from_p (target, exp1, 1))
6046 if (operand_equal_p (exp0, exp1, 0))
6048 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6049 *op1 = copy_rtx (*op0);
6053 /* If we need to preserve evaluation order, copy exp0 into its own
6054 temporary variable so that it can't be clobbered by exp1. */
6055 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6056 exp0 = save_expr (exp0);
6057 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6058 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6063 /* expand_expr: generate code for computing expression EXP.
6064 An rtx for the computed value is returned. The value is never null.
6065 In the case of a void EXP, const0_rtx is returned.
6067 The value may be stored in TARGET if TARGET is nonzero.
6068 TARGET is just a suggestion; callers must assume that
6069 the rtx returned may not be the same as TARGET.
6071 If TARGET is CONST0_RTX, it means that the value will be ignored.
6073 If TMODE is not VOIDmode, it suggests generating the
6074 result in mode TMODE. But this is done only when convenient.
6075 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6076 TMODE is just a suggestion; callers must assume that
6077 the rtx returned may not have mode TMODE.
6079 Note that TARGET may have neither TMODE nor MODE. In that case, it
6080 probably will not be used.
6082 If MODIFIER is EXPAND_SUM then when EXP is an addition
6083 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6084 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6085 products as above, or REG or MEM, or constant.
6086 Ordinarily in such cases we would output mul or add instructions
6087 and then return a pseudo reg containing the sum.
6089 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6090 it also marks a label as absolutely required (it can't be dead).
6091 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6092 This is used for outputting expressions used in initializers.
6094 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6095 with a constant address even if that address is not normally legitimate.
6096 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6098 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6099 a call parameter. Such targets require special care as we haven't yet
6100 marked TARGET so that it's safe from being trashed by libcalls. We
6101 don't want to use TARGET for anything but the final result;
6102 Intermediate values must go elsewhere. Additionally, calls to
6103 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6105 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6106 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6107 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
6108 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on recursively. */
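/* An added illustration of EXPAND_SUM: expanding the address
   computation for "a[i]" (with 4-byte elements) this way may
   simply return

     (plus (reg A) (mult (reg I) (const_int 4)))

   leaving it to the caller's address-forming code to decide
   whether the sum must be reduced to a single register.  */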
6111 static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
6112 enum expand_modifier, rtx *);
6115 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6116 enum expand_modifier modifier, rtx *alt_rtl)
6119 rtx ret, last = NULL;
6121 /* Handle ERROR_MARK before anybody tries to access its type. */
6122 if (TREE_CODE (exp) == ERROR_MARK
6123 || TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK)
6125 ret = CONST0_RTX (tmode);
6126 return ret ? ret : const0_rtx;
6129 if (flag_non_call_exceptions)
6131 rn = lookup_stmt_eh_region (exp);
6132 /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw. */
6134 last = get_last_insn ();
6137 /* If this is an expression of some kind and it has an associated line
6138 number, then emit the line number before expanding the expression.
6140 We need to save and restore the file and line information so that
6141 errors discovered during expansion are emitted with the right
6142 information. It would be better if the diagnostic routines
6143 used the file/line information embedded in the tree nodes rather than globals. */
6145 if (cfun && EXPR_HAS_LOCATION (exp))
6147 location_t saved_location = input_location;
6148 input_location = EXPR_LOCATION (exp);
6149 emit_line_note (input_location);
6151 /* Record where the insns produced belong. */
6152 record_block_change (TREE_BLOCK (exp));
6154 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6156 input_location = saved_location;
6160 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6163 /* If using non-call exceptions, mark all insns that may trap.
6164 expand_call() will mark CALL_INSNs before we get to this code,
6165 but it doesn't handle libcalls, and these may trap. */
6169 for (insn = next_real_insn (last); insn;
6170 insn = next_real_insn (insn))
6172 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
6173 /* If we want exceptions for non-call insns, any
6174 may_trap_p instruction may throw. */
6175 && GET_CODE (PATTERN (insn)) != CLOBBER
6176 && GET_CODE (PATTERN (insn)) != USE
6177 && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
6179 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
6189 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
6190 enum expand_modifier modifier, rtx *alt_rtl)
6193 tree type = TREE_TYPE (exp);
6195 enum machine_mode mode;
6196 enum tree_code code = TREE_CODE (exp);
6198 rtx subtarget, original_target;
6201 bool reduce_bit_field = false;
6202 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field && !ignore \
6203 ? reduce_to_bit_field_precision ((expr), \
6208 mode = TYPE_MODE (type);
6209 unsignedp = TYPE_UNSIGNED (type);
6210 if (lang_hooks.reduce_bit_field_operations
6211 && TREE_CODE (type) == INTEGER_TYPE
6212 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type))
6214 /* An operation in what may be a bit-field type needs the
6215 result to be reduced to the precision of the bit-field type,
6216 which is narrower than that of the type's mode. */
6217 reduce_bit_field = true;
6218 if (modifier == EXPAND_STACK_PARM)
6222 /* Use subtarget as the target for operand 0 of a binary operation. */
6223 subtarget = get_subtarget (target);
6224 original_target = target;
6225 ignore = (target == const0_rtx
6226 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6227 || code == CONVERT_EXPR || code == COND_EXPR
6228 || code == VIEW_CONVERT_EXPR)
6229 && TREE_CODE (type) == VOID_TYPE));
6231 /* If we are going to ignore this result, we need only do something
6232 if there is a side-effect somewhere in the expression. If there
6233 is, short-circuit the most common cases here. Note that we must
6234 not call expand_expr with anything but const0_rtx in case this
6235 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6239 if (! TREE_SIDE_EFFECTS (exp))
6242 /* Ensure we reference a volatile object even if value is ignored, but
6243 don't do this if all we are doing is taking its address. */
6244 if (TREE_THIS_VOLATILE (exp)
6245 && TREE_CODE (exp) != FUNCTION_DECL
6246 && mode != VOIDmode && mode != BLKmode
6247 && modifier != EXPAND_CONST_ADDRESS)
6249 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6251 temp = copy_to_reg (temp);
6255 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6256 || code == INDIRECT_REF)
6257 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6260 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6261 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6263 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6264 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6267 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6268 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6269 /* If the second operand has no side effects, just evaluate
6271 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6273 else if (code == BIT_FIELD_REF)
6275 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6276 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6277 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
/* If we will do cse, generate all results into pseudo registers
6285 since 1) that allows cse to find more things
6286 and 2) otherwise cse could produce an insn the machine
6287 cannot support. An exception is a CONSTRUCTOR into a multi-word
6288 MEM: that's much more likely to be most efficient into the MEM.
6289 Another is a CALL_EXPR which must return in memory. */
6291 if (! cse_not_expected && mode != BLKmode && target
6292 && (!REG_P (target) || REGNO (target) < FIRST_PSEUDO_REGISTER)
6293 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
6294 && ! (code == CALL_EXPR && aggregate_value_p (exp, exp)))
6301 tree function = decl_function_context (exp);
6303 temp = label_rtx (exp);
6304 temp = gen_rtx_LABEL_REF (Pmode, temp);
6306 if (function != current_function_decl
6308 LABEL_REF_NONLOCAL_P (temp) = 1;
6310 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
6316 /* If a static var's type was incomplete when the decl was written,
6317 but the type is complete now, lay out the decl now. */
6318 if (DECL_SIZE (exp) == 0
6319 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6320 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6321 layout_decl (exp, 0);
6323 /* ... fall through ... */
6327 if (DECL_RTL (exp) == 0)
/* Ensure the variable is marked as used even if it doesn't go through
a parser. If it hasn't been used yet, write out an external
6333 if (! TREE_USED (exp))
6335 assemble_external (exp);
6336 TREE_USED (exp) = 1;
6339 /* Show we haven't gotten RTL for this yet. */
6342 /* Variables inherited from containing functions should have
6343 been lowered by this point. */
6344 context = decl_function_context (exp);
6346 && context != current_function_decl
6347 && !TREE_STATIC (exp)
6348 /* ??? C++ creates functions that are not TREE_STATIC. */
6349 && TREE_CODE (exp) != FUNCTION_DECL)
6352 /* This is the case of an array whose size is to be determined
6353 from its initializer, while the initializer is still being parsed.
6356 else if (MEM_P (DECL_RTL (exp))
6357 && REG_P (XEXP (DECL_RTL (exp), 0)))
6358 temp = validize_mem (DECL_RTL (exp));
/* If DECL_RTL is memory, we are in the normal case; if either
the address is not valid, or it is not a register and -fforce-addr
is specified, get the address into a register. */
6364 else if (MEM_P (DECL_RTL (exp))
6365 && modifier != EXPAND_CONST_ADDRESS
6366 && modifier != EXPAND_SUM
6367 && modifier != EXPAND_INITIALIZER
6368 && (! memory_address_p (DECL_MODE (exp),
6369 XEXP (DECL_RTL (exp), 0))
6371 && !REG_P (XEXP (DECL_RTL (exp), 0)))))
6374 *alt_rtl = DECL_RTL (exp);
6375 temp = replace_equiv_address (DECL_RTL (exp),
6376 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6379 /* If we got something, return it. But first, set the alignment
6380 if the address is a register. */
6383 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
6384 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6389 /* If the mode of DECL_RTL does not match that of the decl, it
6390 must be a promoted value. We return a SUBREG of the wanted mode,
6391 but mark it so that we know that it was already extended. */
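/* For example, on a target that promotes SImode locals into
word-sized registers, DECL_RTL is a word_mode REG and what we
return here looks like (subreg:SI (reg:DI ...)) with
SUBREG_PROMOTED_VAR_P set. */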
6393 if (REG_P (DECL_RTL (exp))
6394 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6396 /* Get the signedness used for this variable. Ensure we get the
6397 same mode we got when the variable was declared. */
6398 if (GET_MODE (DECL_RTL (exp))
6399 != promote_mode (type, DECL_MODE (exp), &unsignedp,
6400 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6403 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6404 SUBREG_PROMOTED_VAR_P (temp) = 1;
6405 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6409 return DECL_RTL (exp);
6412 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6413 TREE_INT_CST_HIGH (exp), mode);
6415 /* ??? If overflow is set, fold will have done an incomplete job,
6416 which can result in (plus xx (const_int 0)), which can get
6417 simplified by validate_replace_rtx during virtual register
6418 instantiation, which can result in unrecognizable insns.
6419 Avoid this by forcing all overflows into registers. */
6420 if (TREE_CONSTANT_OVERFLOW (exp)
6421 && modifier != EXPAND_INITIALIZER)
6422 temp = force_reg (mode, temp);
6427 if (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_INT
6428 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_FLOAT)
6429 return const_vector_from_tree (exp);
6431 return expand_expr (build1 (CONSTRUCTOR, TREE_TYPE (exp),
6432 TREE_VECTOR_CST_ELTS (exp)),
6433 ignore ? const0_rtx : target, tmode, modifier);
6436 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6439 /* If optimized, generate immediate CONST_DOUBLE
6440 which will be turned into memory by reload if necessary.
6442 We used to force a register so that loop.c could see it. But
6443 this does not allow gen_* patterns to perform optimizations with
6444 the constants. It also produces two insns in cases like "x = 1.0;".
6445 On most machines, floating-point constants are not permitted in
6446 many insns, so we'd end up copying it to a register in any case.
6448 Now, we do the copying in expand_binop, if appropriate. */
6449 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6450 TYPE_MODE (TREE_TYPE (exp)));
6453 /* Handle evaluating a complex constant in a CONCAT target. */
6454 if (original_target && GET_CODE (original_target) == CONCAT)
6456 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6459 rtarg = XEXP (original_target, 0);
6460 itarg = XEXP (original_target, 1);
6462 /* Move the real and imaginary parts separately. */
6463 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
6464 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
6467 emit_move_insn (rtarg, op0);
6469 emit_move_insn (itarg, op1);
6471 return original_target;
6474 /* ... fall through ... */
6477 temp = output_constant_def (exp, 1);
6479 /* temp contains a constant address.
6480 On RISC machines where a constant address isn't valid,
6481 make some insns to get that address into a register. */
6482 if (modifier != EXPAND_CONST_ADDRESS
6483 && modifier != EXPAND_INITIALIZER
6484 && modifier != EXPAND_SUM
6485 && (! memory_address_p (mode, XEXP (temp, 0))
6486 || flag_force_addr))
6487 return replace_equiv_address (temp,
6488 copy_rtx (XEXP (temp, 0)));
6493 tree val = TREE_OPERAND (exp, 0);
6494 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
6496 if (TREE_CODE (val) != VAR_DECL || !DECL_ARTIFICIAL (val))
6498 /* We can indeed still hit this case, typically via builtin
6499 expanders calling save_expr immediately before expanding
6500 something. Assume this means that we only have to deal
6501 with non-BLKmode values. */
6502 if (GET_MODE (ret) == BLKmode)
6505 val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
6506 DECL_ARTIFICIAL (val) = 1;
6507 TREE_OPERAND (exp, 0) = val;
6509 if (!CONSTANT_P (ret))
6510 ret = copy_to_reg (ret);
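/* Stash the rtx in the artificial decl so that later expansions of
this SAVE_EXPR reuse the value instead of recomputing it. */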
6511 SET_DECL_RTL (val, ret);
6518 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6519 expand_goto (TREE_OPERAND (exp, 0));
6521 expand_computed_goto (TREE_OPERAND (exp, 0));
6525 /* If we don't need the result, just ensure we evaluate any
6531 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6532 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6537 /* All elts simple constants => refer to a constant in memory. But
6538 if this is a non-BLKmode mode, let it store a field at a time
6539 since that should make a CONST_INT or CONST_DOUBLE when we
6540 fold. Likewise, if we have a target we can use, it is best to
6541 store directly into the target unless the type is large enough
6542 that memcpy will be used. If we are making an initializer and
6543 all operands are constant, put it in memory as well.
FIXME: Avoid trying to fill vector constructors piecemeal.
6546 Output them with output_constant_def below unless we're sure
6547 they're zeros. This should go away when vector initializers
6548 are treated like VECTOR_CST instead of arrays.
6550 else if ((TREE_STATIC (exp)
6551 && ((mode == BLKmode
6552 && ! (target != 0 && safe_from_p (target, exp, 1)))
6553 || TREE_ADDRESSABLE (exp)
6554 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6555 && (! MOVE_BY_PIECES_P
6556 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6558 && ! mostly_zeros_p (exp))))
6559 || ((modifier == EXPAND_INITIALIZER
6560 || modifier == EXPAND_CONST_ADDRESS)
6561 && TREE_CONSTANT (exp)))
6563 rtx constructor = output_constant_def (exp, 1);
6565 if (modifier != EXPAND_CONST_ADDRESS
6566 && modifier != EXPAND_INITIALIZER
6567 && modifier != EXPAND_SUM)
6568 constructor = validize_mem (constructor);
6574 /* Handle calls that pass values in multiple non-contiguous
6575 locations. The Irix 6 ABI has examples of this. */
6576 if (target == 0 || ! safe_from_p (target, exp, 1)
6577 || GET_CODE (target) == PARALLEL
6578 || modifier == EXPAND_STACK_PARM)
6580 = assign_temp (build_qualified_type (type,
6582 | (TREE_READONLY (exp)
6583 * TYPE_QUAL_CONST))),
6584 0, TREE_ADDRESSABLE (exp), 1);
6586 store_constructor (exp, target, 0, int_expr_size (exp));
6592 tree exp1 = TREE_OPERAND (exp, 0);
6594 if (modifier != EXPAND_WRITE)
6598 t = fold_read_from_constant_string (exp);
6600 return expand_expr (t, target, tmode, modifier);
6603 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6604 op0 = memory_address (mode, op0);
6605 temp = gen_rtx_MEM (mode, op0);
6606 set_mem_attributes (temp, exp, 0);
6608 /* If we are writing to this object and its type is a record with
6609 readonly fields, we must mark it as readonly so it will
6610 conflict with readonly references to those fields. */
6611 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
6612 RTX_UNCHANGING_P (temp) = 1;
6619 #ifdef ENABLE_CHECKING
6620 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6625 tree array = TREE_OPERAND (exp, 0);
6626 tree low_bound = array_ref_low_bound (exp);
6627 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
/* Optimize the special case of a zero lower bound.
6632 We convert the low_bound to sizetype to avoid some problems
6633 with constant folding. (E.g. suppose the lower bound is 1,
6634 and its mode is QI. Without the conversion, (ARRAY
6635 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6636 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6638 if (! integer_zerop (low_bound))
6639 index = size_diffop (index, convert (sizetype, low_bound));
6641 /* Fold an expression like: "foo"[2].
6642 This is not done in fold so it won't happen inside &.
6643 Don't fold if this is for wide characters since it's too
6644 difficult to do correctly and this is a very rare case. */
6646 if (modifier != EXPAND_CONST_ADDRESS
6647 && modifier != EXPAND_INITIALIZER
6648 && modifier != EXPAND_MEMORY)
6650 tree t = fold_read_from_constant_string (exp);
6653 return expand_expr (t, target, tmode, modifier);
6656 /* If this is a constant index into a constant array,
6657 just get the value from the array. Handle both the cases when
6658 we have an explicit constructor and when our operand is a variable
6659 that was declared const. */
6661 if (modifier != EXPAND_CONST_ADDRESS
6662 && modifier != EXPAND_INITIALIZER
6663 && modifier != EXPAND_MEMORY
6664 && TREE_CODE (array) == CONSTRUCTOR
6665 && ! TREE_SIDE_EFFECTS (array)
6666 && TREE_CODE (index) == INTEGER_CST
6667 && 0 > compare_tree_int (index,
6668 list_length (CONSTRUCTOR_ELTS
6669 (TREE_OPERAND (exp, 0)))))
6673 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6674 i = TREE_INT_CST_LOW (index);
6675 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6679 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
6683 else if (optimize >= 1
6684 && modifier != EXPAND_CONST_ADDRESS
6685 && modifier != EXPAND_INITIALIZER
6686 && modifier != EXPAND_MEMORY
6687 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6688 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6689 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
6690 && targetm.binds_local_p (array))
6692 if (TREE_CODE (index) == INTEGER_CST)
6694 tree init = DECL_INITIAL (array);
6696 if (TREE_CODE (init) == CONSTRUCTOR)
6700 for (elem = CONSTRUCTOR_ELTS (init);
6702 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6703 elem = TREE_CHAIN (elem))
6706 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6707 return expand_expr (fold (TREE_VALUE (elem)), target,
6710 else if (TREE_CODE (init) == STRING_CST
6711 && 0 > compare_tree_int (index,
6712 TREE_STRING_LENGTH (init)))
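/* Read the element directly out of the string constant; e.g. given
static const char s[] = "abc", the reference s[1] becomes
(const_int 98). */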
6714 tree type = TREE_TYPE (TREE_TYPE (init));
6715 enum machine_mode mode = TYPE_MODE (type);
6717 if (GET_MODE_CLASS (mode) == MODE_INT
6718 && GET_MODE_SIZE (mode) == 1)
6719 return gen_int_mode (TREE_STRING_POINTER (init)
6720 [TREE_INT_CST_LOW (index)], mode);
6725 goto normal_inner_ref;
6728 /* If the operand is a CONSTRUCTOR, we can just extract the
6729 appropriate field if it is present. */
6730 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
6734 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6735 elt = TREE_CHAIN (elt))
6736 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6737 /* We can normally use the value of the field in the
6738 CONSTRUCTOR. However, if this is a bitfield in
6739 an integral mode that we can fit in a HOST_WIDE_INT,
6740 we must mask only the number of bits in the bitfield,
6741 since this is done implicitly by the constructor. If
6742 the bitfield does not meet either of those conditions,
6743 we can't do this optimization. */
6744 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6745 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6747 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6748 <= HOST_BITS_PER_WIDE_INT))))
6750 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
6751 && modifier == EXPAND_STACK_PARM)
6753 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6754 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6756 HOST_WIDE_INT bitsize
6757 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6758 enum machine_mode imode
6759 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6761 if (TYPE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6763 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6764 op0 = expand_and (imode, op0, op1, target);
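/* For a signed bit-field we must sign-extend instead: shift the
value left so the field's top bit becomes the sign bit of the mode,
then arithmetic-shift it back down. E.g. a 3-bit field in SImode
is shifted left and then right by 29. */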
6769 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6772 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6774 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6782 goto normal_inner_ref;
6785 case ARRAY_RANGE_REF:
6788 enum machine_mode mode1;
6789 HOST_WIDE_INT bitsize, bitpos;
6792 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6793 &mode1, &unsignedp, &volatilep);
6796 /* If we got back the original object, something is wrong. Perhaps
6797 we are evaluating an expression too early. In any event, don't
6798 infinitely recurse. */
/* If TEM's type is a union of variable size, pass TARGET to the inner
computation, since it will need a temporary and TARGET is known
to be usable for that. This occurs in unchecked conversion in Ada. */
6808 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6809 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6811 && modifier != EXPAND_STACK_PARM
6812 ? target : NULL_RTX),
6814 (modifier == EXPAND_INITIALIZER
6815 || modifier == EXPAND_CONST_ADDRESS
6816 || modifier == EXPAND_STACK_PARM)
6817 ? modifier : EXPAND_NORMAL);
/* If this is a constant, put it into a register if it is a
legitimate constant and OFFSET is 0, and into memory if it isn't. */
6821 if (CONSTANT_P (op0))
6823 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6824 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6826 op0 = force_reg (mode, op0);
6828 op0 = validize_mem (force_const_mem (mode, op0));
/* Otherwise, if this object is not in memory and we either have an
6832 offset or a BLKmode result, put it there. This case can't occur in
6833 C, but can in Ada if we have unchecked conversion of an expression
6834 from a scalar type to an array or record type or for an
6835 ARRAY_RANGE_REF whose type is BLKmode. */
6836 else if (!MEM_P (op0)
6838 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
6840 tree nt = build_qualified_type (TREE_TYPE (tem),
6841 (TYPE_QUALS (TREE_TYPE (tem))
6842 | TYPE_QUAL_CONST));
6843 rtx memloc = assign_temp (nt, 1, 1, 1);
6845 emit_move_insn (memloc, op0);
6851 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
6857 #ifdef POINTERS_EXTEND_UNSIGNED
6858 if (GET_MODE (offset_rtx) != Pmode)
6859 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
6861 if (GET_MODE (offset_rtx) != ptr_mode)
6862 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6865 if (GET_MODE (op0) == BLKmode
/* A constant address in OP0 can have VOIDmode; we must
not try to call force_reg in that case. */
6868 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6870 && (bitpos % bitsize) == 0
6871 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6872 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
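/* The reference is to an aligned slice of a BLKmode object, so a
simple address adjustment suffices; no bit-field extraction is
needed. */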
6874 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
6878 op0 = offset_address (op0, offset_rtx,
6879 highest_pow2_factor (offset));
6882 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
6883 record its alignment as BIGGEST_ALIGNMENT. */
6884 if (MEM_P (op0) && bitpos == 0 && offset != 0
6885 && is_aligning_offset (offset, tem))
6886 set_mem_align (op0, BIGGEST_ALIGNMENT);
6888 /* Don't forget about volatility even if this is a bitfield. */
6889 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
6891 if (op0 == orig_op0)
6892 op0 = copy_rtx (op0);
6894 MEM_VOLATILE_P (op0) = 1;
/* The following code doesn't handle CONCAT.
Assume only bitpos == 0 can be used for CONCAT, due to
one-element arrays having the same mode as their element. */
6900 if (GET_CODE (op0) == CONCAT)
6902 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
6907 /* In cases where an aligned union has an unaligned object
6908 as a field, we might be extracting a BLKmode value from
6909 an integer-mode (e.g., SImode) object. Handle this case
6910 by doing the extract into an object as wide as the field
6911 (which we know to be the width of a basic mode), then
6912 storing into memory, and changing the mode to BLKmode. */
6913 if (mode1 == VOIDmode
6914 || REG_P (op0) || GET_CODE (op0) == SUBREG
6915 || (mode1 != BLKmode && ! direct_load[(int) mode1]
6916 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6917 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
6918 && modifier != EXPAND_CONST_ADDRESS
6919 && modifier != EXPAND_INITIALIZER)
6920 /* If the field isn't aligned enough to fetch as a memref,
6921 fetch it as a bit field. */
6922 || (mode1 != BLKmode
6923 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
6924 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
6926 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
6927 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
6928 && ((modifier == EXPAND_CONST_ADDRESS
6929 || modifier == EXPAND_INITIALIZER)
6931 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
6932 || (bitpos % BITS_PER_UNIT != 0)))
6933 /* If the type and the field are a constant size and the
6934 size of the type isn't the same size as the bitfield,
6935 we must use bitfield operations. */
6937 && TYPE_SIZE (TREE_TYPE (exp))
6938 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
6939 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
6942 enum machine_mode ext_mode = mode;
6944 if (ext_mode == BLKmode
6945 && ! (target != 0 && MEM_P (op0)
6947 && bitpos % BITS_PER_UNIT == 0))
6948 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
6950 if (ext_mode == BLKmode)
6953 target = assign_temp (type, 0, 1, 1);
6958 /* In this case, BITPOS must start at a byte boundary and
6959 TARGET, if specified, must be a MEM. */
6961 || (target != 0 && !MEM_P (target))
6962 || bitpos % BITS_PER_UNIT != 0)
6965 emit_block_move (target,
6966 adjust_address (op0, VOIDmode,
6967 bitpos / BITS_PER_UNIT),
6968 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6970 (modifier == EXPAND_STACK_PARM
6971 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
6976 op0 = validize_mem (op0);
6978 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
6979 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
6981 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
6982 (modifier == EXPAND_STACK_PARM
6983 ? NULL_RTX : target),
6984 ext_mode, ext_mode);
6986 /* If the result is a record type and BITSIZE is narrower than
6987 the mode of OP0, an integral mode, and this is a big endian
6988 machine, we must put the field into the high-order bits. */
6989 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
6990 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
6991 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
6992 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
6993 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
6997 /* If the result type is BLKmode, store the data into a temporary
6998 of the appropriate type, but with the mode corresponding to the
6999 mode for the data we have (op0's mode). It's tempting to make
7000 this a constant type, since we know it's only being stored once,
7001 but that can cause problems if we are taking the address of this
7002 COMPONENT_REF because the MEM of any reference via that address
7003 will have flags corresponding to the type, which will not
7004 necessarily be constant. */
7005 if (mode == BLKmode)
7008 = assign_stack_temp_for_type
7009 (ext_mode, GET_MODE_BITSIZE (ext_mode), 0, type);
7011 emit_move_insn (new, op0);
7012 op0 = copy_rtx (new);
7013 PUT_MODE (op0, BLKmode);
7014 set_mem_attributes (op0, exp, 1);
7020 /* If the result is BLKmode, use that to access the object
7022 if (mode == BLKmode)
7025 /* Get a reference to just this component. */
7026 if (modifier == EXPAND_CONST_ADDRESS
7027 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7028 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7030 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7032 if (op0 == orig_op0)
7033 op0 = copy_rtx (op0);
7035 set_mem_attributes (op0, exp, 0);
7036 if (REG_P (XEXP (op0, 0)))
7037 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7039 MEM_VOLATILE_P (op0) |= volatilep;
7040 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7041 || modifier == EXPAND_CONST_ADDRESS
7042 || modifier == EXPAND_INITIALIZER)
7044 else if (target == 0)
7045 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7047 convert_move (target, op0, unsignedp);
7052 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
7055 /* Check for a built-in function. */
7056 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7057 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7059 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7061 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7062 == BUILT_IN_FRONTEND)
7063 return lang_hooks.expand_expr (exp, original_target,
7067 return expand_builtin (exp, target, subtarget, tmode, ignore);
7070 return expand_call (exp, target, ignore);
7072 case NON_LVALUE_EXPR:
7075 if (TREE_OPERAND (exp, 0) == error_mark_node)
7078 if (TREE_CODE (type) == UNION_TYPE)
7080 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
/* If both input and output are BLKmode, this conversion isn't doing
anything except possibly changing memory attributes. */
7084 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7086 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7089 result = copy_rtx (result);
7090 set_mem_attributes (result, exp, 0);
7096 if (TYPE_MODE (type) != BLKmode)
7097 target = gen_reg_rtx (TYPE_MODE (type));
7099 target = assign_temp (type, 0, 1, 1);
7103 /* Store data into beginning of memory target. */
7104 store_expr (TREE_OPERAND (exp, 0),
7105 adjust_address (target, TYPE_MODE (valtype), 0),
7106 modifier == EXPAND_STACK_PARM ? 2 : 0);
7108 else if (REG_P (target))
7109 /* Store this field into a union of the proper type. */
7110 store_field (target,
7111 MIN ((int_size_in_bytes (TREE_TYPE
7112 (TREE_OPERAND (exp, 0)))
7114 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7115 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7116 VOIDmode, 0, type, 0);
7120 /* Return the entire union. */
7124 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7126 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7129 /* If the signedness of the conversion differs and OP0 is
7130 a promoted SUBREG, clear that indication since we now
7131 have to do the proper extension. */
7132 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7133 && GET_CODE (op0) == SUBREG)
7134 SUBREG_PROMOTED_VAR_P (op0) = 0;
7136 return REDUCE_BIT_FIELD (op0);
7139 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7140 op0 = REDUCE_BIT_FIELD (op0);
7141 if (GET_MODE (op0) == mode)
7144 /* If OP0 is a constant, just convert it into the proper mode. */
7145 if (CONSTANT_P (op0))
7147 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7148 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7150 if (modifier == EXPAND_INITIALIZER)
7151 return simplify_gen_subreg (mode, op0, inner_mode,
7152 subreg_lowpart_offset (mode,
7155 return convert_modes (mode, inner_mode, op0,
7156 TYPE_UNSIGNED (inner_type));
7159 if (modifier == EXPAND_INITIALIZER)
7160 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7164 convert_to_mode (mode, op0,
7165 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7167 convert_move (target, op0,
7168 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7171 case VIEW_CONVERT_EXPR:
7172 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7174 /* If the input and output modes are both the same, we are done.
7175 Otherwise, if neither mode is BLKmode and both are integral and within
7176 a word, we can use gen_lowpart. If neither is true, make sure the
7177 operand is in memory and convert the MEM to the new mode. */
7178 if (TYPE_MODE (type) == GET_MODE (op0))
7180 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7181 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7182 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
7183 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7184 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7185 op0 = gen_lowpart (TYPE_MODE (type), op0);
7186 else if (!MEM_P (op0))
7188 /* If the operand is not a MEM, force it into memory. Since we
are going to be changing the mode of the MEM, don't call
7190 force_const_mem for constants because we don't allow pool
7191 constants to change mode. */
7192 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7194 if (TREE_ADDRESSABLE (exp))
7197 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7199 = assign_stack_temp_for_type
7200 (TYPE_MODE (inner_type),
7201 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7203 emit_move_insn (target, op0);
7207 /* At this point, OP0 is in the correct mode. If the output type is such
7208 that the operand is known to be aligned, indicate that it is.
7209 Otherwise, we need only be concerned about alignment for non-BLKmode
7213 op0 = copy_rtx (op0);
7215 if (TYPE_ALIGN_OK (type))
7216 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7217 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7218 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7220 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7221 HOST_WIDE_INT temp_size
7222 = MAX (int_size_in_bytes (inner_type),
7223 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7224 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7225 temp_size, 0, type);
7226 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7228 if (TREE_ADDRESSABLE (exp))
7231 if (GET_MODE (op0) == BLKmode)
7232 emit_block_move (new_with_op0_mode, op0,
7233 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7234 (modifier == EXPAND_STACK_PARM
7235 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7237 emit_move_insn (new_with_op0_mode, op0);
7242 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7248 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
7249 something else, make sure we add the register to the constant and
7250 then to the other thing. This case can occur during strength
7251 reduction and doing it this way will produce better code if the
7252 frame pointer or argument pointer is eliminated.
7254 fold-const.c will ensure that the constant is always in the inner
7255 PLUS_EXPR, so the only case we need to do anything about is if
7256 sp, ap, or fp is our second argument, in which case we must swap
7257 the innermost first argument and our second argument. */
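/* For example, (x + 4) + fp is rearranged here into (fp + 4) + x,
so that fp and the constant can be combined first. */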
7259 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7260 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7261 && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
7262 && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7263 || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7264 || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7266 tree t = TREE_OPERAND (exp, 1);
7268 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7269 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7272 /* If the result is to be ptr_mode and we are adding an integer to
7273 something, we might be forming a constant. So try to use
7274 plus_constant. If it produces a sum and we can't accept it,
7275 use force_operand. This allows P = &ARR[const] to generate
7276 efficient code on machines where a SYMBOL_REF is not a valid
7279 If this is an EXPAND_SUM call, always return the sum. */
7280 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7281 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7283 if (modifier == EXPAND_STACK_PARM)
7285 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7286 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7287 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7291 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7293 /* Use immed_double_const to ensure that the constant is
7294 truncated according to the mode of OP1, then sign extended
7295 to a HOST_WIDE_INT. Using the constant directly can result
7296 in non-canonical RTL in a 64x32 cross compile. */
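/* E.g. on a 64-bit host targeting a 32-bit machine, the SImode
constant 0x80000000 must be represented as the sign-extended
(const_int -2147483648) to be canonical. */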
7298 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7300 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7301 op1 = plus_constant (op1, INTVAL (constant_part));
7302 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7303 op1 = force_operand (op1, target);
7304 return REDUCE_BIT_FIELD (op1);
7307 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
&& GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7309 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7313 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7314 (modifier == EXPAND_INITIALIZER
7315 ? EXPAND_INITIALIZER : EXPAND_SUM));
7316 if (! CONSTANT_P (op0))
7318 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7319 VOIDmode, modifier);
7320 /* Return a PLUS if modifier says it's OK. */
7321 if (modifier == EXPAND_SUM
7322 || modifier == EXPAND_INITIALIZER)
7323 return simplify_gen_binary (PLUS, mode, op0, op1);
7326 /* Use immed_double_const to ensure that the constant is
7327 truncated according to the mode of OP1, then sign extended
7328 to a HOST_WIDE_INT. Using the constant directly can result
7329 in non-canonical RTL in a 64x32 cross compile. */
7331 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7333 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7334 op0 = plus_constant (op0, INTVAL (constant_part));
7335 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7336 op0 = force_operand (op0, target);
7337 return REDUCE_BIT_FIELD (op0);
7341 /* No sense saving up arithmetic to be done
7342 if it's all in the wrong mode to form part of an address.
7343 And force_operand won't know whether to sign-extend or
7345 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7346 || mode != ptr_mode)
7348 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7349 subtarget, &op0, &op1, 0);
7350 if (op0 == const0_rtx)
7352 if (op1 == const0_rtx)
7357 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7358 subtarget, &op0, &op1, modifier);
7359 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7362 /* For initializers, we are allowed to return a MINUS of two
7363 symbolic constants. Here we handle all cases when both operands
7365 /* Handle difference of two symbolic constants,
7366 for the sake of an initializer. */
7367 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7368 && really_constant_p (TREE_OPERAND (exp, 0))
7369 && really_constant_p (TREE_OPERAND (exp, 1)))
7371 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7372 NULL_RTX, &op0, &op1, modifier);
7374 /* If the last operand is a CONST_INT, use plus_constant of
7375 the negated constant. Else make the MINUS. */
7376 if (GET_CODE (op1) == CONST_INT)
7377 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
7379 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
7382 /* No sense saving up arithmetic to be done
7383 if it's all in the wrong mode to form part of an address.
7384 And force_operand won't know whether to sign-extend or
7386 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7387 || mode != ptr_mode)
7390 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7391 subtarget, &op0, &op1, modifier);
7393 /* Convert A - const to A + (-const). */
7394 if (GET_CODE (op1) == CONST_INT)
7396 op1 = negate_rtx (mode, op1);
7397 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7403 /* If first operand is constant, swap them.
7404 Thus the following special case checks need only
7405 check the second operand. */
7406 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7408 tree t1 = TREE_OPERAND (exp, 0);
7409 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7410 TREE_OPERAND (exp, 1) = t1;
7413 /* Attempt to return something suitable for generating an
7414 indexed address, for machines that support that. */
7416 if (modifier == EXPAND_SUM && mode == ptr_mode
7417 && host_integerp (TREE_OPERAND (exp, 1), 0))
7419 tree exp1 = TREE_OPERAND (exp, 1);
7421 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7425 op0 = force_operand (op0, NULL_RTX);
7427 op0 = copy_to_mode_reg (mode, op0);
7429 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
7430 gen_int_mode (tree_low_cst (exp1, 0),
7431 TYPE_MODE (TREE_TYPE (exp1)))));
7434 if (modifier == EXPAND_STACK_PARM)
7437 /* Check for multiplying things that have been extended
7438 from a narrower type. If this machine supports multiplying
7439 in that narrower type with a result in the desired type,
7440 do it that way, and avoid the explicit type-conversion. */
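/* For instance, given short operands a and b, (int) a * (int) b can
use a single widening multiply (a mulhisi3-style pattern) instead
of extending both operands to SImode first. */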
7441 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7442 && TREE_CODE (type) == INTEGER_TYPE
7443 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7444 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7445 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7446 && int_fits_type_p (TREE_OPERAND (exp, 1),
7447 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7448 /* Don't use a widening multiply if a shift will do. */
7449 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7450 > HOST_BITS_PER_WIDE_INT)
7451 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7453 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7454 && (TYPE_PRECISION (TREE_TYPE
7455 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7456 == TYPE_PRECISION (TREE_TYPE
7458 (TREE_OPERAND (exp, 0), 0))))
7459 /* If both operands are extended, they must either both
7460 be zero-extended or both be sign-extended. */
7461 && (TYPE_UNSIGNED (TREE_TYPE
7462 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7463 == TYPE_UNSIGNED (TREE_TYPE
7465 (TREE_OPERAND (exp, 0), 0)))))))
7467 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
7468 enum machine_mode innermode = TYPE_MODE (op0type);
7469 bool zextend_p = TYPE_UNSIGNED (op0type);
7470 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
7471 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
7473 if (mode == GET_MODE_WIDER_MODE (innermode))
7475 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7477 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7478 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7479 TREE_OPERAND (exp, 1),
7480 NULL_RTX, &op0, &op1, 0);
7482 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7483 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7484 NULL_RTX, &op0, &op1, 0);
7487 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7488 && innermode == word_mode)
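/* Only the widening optab for the opposite signedness exists at
word_mode; use it, then correct the high part of the result with
expand_mult_highpart_adjust. */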
7491 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7492 NULL_RTX, VOIDmode, 0);
7493 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7494 op1 = convert_modes (innermode, mode,
7495 expand_expr (TREE_OPERAND (exp, 1),
7496 NULL_RTX, VOIDmode, 0),
7499 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7500 NULL_RTX, VOIDmode, 0);
7501 temp = expand_binop (mode, other_optab, op0, op1, target,
7502 unsignedp, OPTAB_LIB_WIDEN);
7503 hipart = gen_highpart (innermode, temp);
7504 htem = expand_mult_highpart_adjust (innermode, hipart,
7508 emit_move_insn (hipart, htem);
7509 return REDUCE_BIT_FIELD (temp);
7513 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7514 subtarget, &op0, &op1, 0);
7515 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
7517 case TRUNC_DIV_EXPR:
7518 case FLOOR_DIV_EXPR:
7520 case ROUND_DIV_EXPR:
7521 case EXACT_DIV_EXPR:
7522 if (modifier == EXPAND_STACK_PARM)
/* Possible optimization: compute the dividend with EXPAND_SUM;
then, if the divisor is constant, we can optimize the case
where some terms of the dividend have coefficients divisible by it. */
7527 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7528 subtarget, &op0, &op1, 0);
7529 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
/* Emit a/b as a*(1/b). Later we may manage to CSE the reciprocal,
saving an expensive divide. If not, combine will rebuild the original
7535 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7536 && TREE_CODE (type) == REAL_TYPE
7537 && !real_onep (TREE_OPERAND (exp, 0)))
7538 return expand_expr (build2 (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7539 build2 (RDIV_EXPR, type,
7540 build_real (type, dconst1),
7541 TREE_OPERAND (exp, 1))),
7542 target, tmode, modifier);
7546 case TRUNC_MOD_EXPR:
7547 case FLOOR_MOD_EXPR:
7549 case ROUND_MOD_EXPR:
7550 if (modifier == EXPAND_STACK_PARM)
7552 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7553 subtarget, &op0, &op1, 0);
7554 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7556 case FIX_ROUND_EXPR:
7557 case FIX_FLOOR_EXPR:
7559 abort (); /* Not used for C. */
7561 case FIX_TRUNC_EXPR:
7562 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7563 if (target == 0 || modifier == EXPAND_STACK_PARM)
7564 target = gen_reg_rtx (mode);
7565 expand_fix (target, op0, unsignedp);
7569 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7570 if (target == 0 || modifier == EXPAND_STACK_PARM)
7571 target = gen_reg_rtx (mode);
7572 /* expand_float can't figure out what to do if FROM has VOIDmode.
7573 So give it the correct mode. With -O, cse will optimize this. */
7574 if (GET_MODE (op0) == VOIDmode)
7575 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7577 expand_float (target, op0,
7578 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7582 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7583 if (modifier == EXPAND_STACK_PARM)
7585 temp = expand_unop (mode,
7586 optab_for_tree_code (NEGATE_EXPR, type),
7590 return REDUCE_BIT_FIELD (temp);
7593 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7594 if (modifier == EXPAND_STACK_PARM)
7597 /* ABS_EXPR is not valid for complex arguments. */
7598 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7599 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7602 /* Unsigned abs is simply the operand. Testing here means we don't
7603 risk generating incorrect code below. */
7604 if (TYPE_UNSIGNED (type))
7607 return expand_abs (mode, op0, target, unsignedp,
7608 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7612 target = original_target;
7614 || modifier == EXPAND_STACK_PARM
7615 || (MEM_P (target) && MEM_VOLATILE_P (target))
7616 || GET_MODE (target) != mode
7618 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7619 target = gen_reg_rtx (mode);
7620 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7621 target, &op0, &op1, 0);
7623 /* First try to do it with a special MIN or MAX instruction.
7624 If that does not win, use a conditional jump to select the proper
7626 this_optab = optab_for_tree_code (code, type);
7627 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7632 /* At this point, a MEM target is no longer useful; we will get better
7636 target = gen_reg_rtx (mode);
7638 /* If op1 was placed in target, swap op0 and op1. */
7639 if (target != op0 && target == op1)
7647 emit_move_insn (target, op0);
7649 op0 = gen_label_rtx ();
7651 /* If this mode is an integer too wide to compare properly,
7652 compare word by word. Rely on cse to optimize constant cases. */
7653 if (GET_MODE_CLASS (mode) == MODE_INT
7654 && ! can_compare_p (GE, mode, ccp_jump))
7656 if (code == MAX_EXPR)
7657 do_jump_by_parts_greater_rtx (mode, unsignedp, target, op1,
7660 do_jump_by_parts_greater_rtx (mode, unsignedp, op1, target,
7665 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7666 unsignedp, mode, NULL_RTX, NULL_RTX, op0);
7668 emit_move_insn (target, op1);
7673 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7674 if (modifier == EXPAND_STACK_PARM)
7676 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7681 /* ??? Can optimize bitwise operations with one arg constant.
7682 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7683 and (a bitwise1 b) bitwise2 b (etc)
but that is probably not worthwhile. */
7686 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7687 boolean values when we want in all cases to compute both of them. In
7688 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7689 as actual zero-or-1 values and then bitwise anding. In cases where
7690 there cannot be any side effects, better code would be made by
7691 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7692 how to recognize those cases. */
7694 case TRUTH_AND_EXPR:
7695 code = BIT_AND_EXPR;
7700 code = BIT_IOR_EXPR;
7704 case TRUTH_XOR_EXPR:
7705 code = BIT_XOR_EXPR;
7713 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7715 if (modifier == EXPAND_STACK_PARM)
7717 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7718 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7721 /* Could determine the answer when only additive constants differ. Also,
7722 the addition of one can be handled by changing the condition. */
7729 case UNORDERED_EXPR:
7737 temp = do_store_flag (exp,
7738 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
7739 tmode != VOIDmode ? tmode : mode, 0);
7743 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7744 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7746 && REG_P (original_target)
7747 && (GET_MODE (original_target)
7748 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7750 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7753 /* If temp is constant, we can just compute the result. */
7754 if (GET_CODE (temp) == CONST_INT)
7756 if (INTVAL (temp) != 0)
7757 emit_move_insn (target, const1_rtx);
7759 emit_move_insn (target, const0_rtx);
7764 if (temp != original_target)
7766 enum machine_mode mode1 = GET_MODE (temp);
7767 if (mode1 == VOIDmode)
7768 mode1 = tmode != VOIDmode ? tmode : mode;
7770 temp = copy_to_mode_reg (mode1, temp);
7773 op1 = gen_label_rtx ();
7774 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7775 GET_MODE (temp), unsignedp, op1);
7776 emit_move_insn (temp, const1_rtx);
7781 case TRUTH_NOT_EXPR:
7782 if (modifier == EXPAND_STACK_PARM)
7784 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7785 /* The parser is careful to generate TRUTH_NOT_EXPR
7786 only with operands that are always zero or one. */
7787 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7788 target, 1, OPTAB_LIB_WIDEN);
7793 case STATEMENT_LIST:
7795 tree_stmt_iterator iter;
7800 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
7801 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
7806 /* If it's void, we don't need to worry about computing a value. */
7807 if (VOID_TYPE_P (TREE_TYPE (exp)))
7809 tree pred = TREE_OPERAND (exp, 0);
7810 tree then_ = TREE_OPERAND (exp, 1);
7811 tree else_ = TREE_OPERAND (exp, 2);
7813 if (TREE_CODE (then_) != GOTO_EXPR
7814 || TREE_CODE (GOTO_DESTINATION (then_)) != LABEL_DECL
7815 || TREE_CODE (else_) != GOTO_EXPR
7816 || TREE_CODE (GOTO_DESTINATION (else_)) != LABEL_DECL)
7819 jumpif (pred, label_rtx (GOTO_DESTINATION (then_)));
7820 return expand_expr (else_, const0_rtx, VOIDmode, 0);
7823 /* Note that COND_EXPRs whose type is a structure or union
7824 are required to be constructed to contain assignments of
7825 a temporary variable, so that we can evaluate them here
7826 for side effect only. If type is void, we must do likewise. */
7828 if (TREE_ADDRESSABLE (type)
7830 || TREE_TYPE (TREE_OPERAND (exp, 1)) == void_type_node
7831 || TREE_TYPE (TREE_OPERAND (exp, 2)) == void_type_node)
7834 /* If we are not to produce a result, we have no target. Otherwise,
7835 if a target was specified use it; it will not be used as an
7836 intermediate target unless it is safe. If no target, use a
7839 if (modifier != EXPAND_STACK_PARM
7841 && safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
7842 && GET_MODE (original_target) == mode
7843 #ifdef HAVE_conditional_move
7844 && (! can_conditionally_move_p (mode)
7845 || REG_P (original_target))
7847 && !MEM_P (original_target))
7848 temp = original_target;
7850 temp = assign_temp (type, 0, 0, 1);
7852 do_pending_stack_adjust ();
7854 op0 = gen_label_rtx ();
7855 op1 = gen_label_rtx ();
7856 jumpifnot (TREE_OPERAND (exp, 0), op0);
7857 store_expr (TREE_OPERAND (exp, 1), temp,
7858 modifier == EXPAND_STACK_PARM ? 2 : 0);
7860 emit_jump_insn (gen_jump (op1));
7863 store_expr (TREE_OPERAND (exp, 2), temp,
7864 modifier == EXPAND_STACK_PARM ? 2 : 0);
7872 /* If lhs is complex, expand calls in rhs before computing it.
7873 That's so we don't compute a pointer and save it over a
7874 call. If lhs is simple, compute it first so we can give it
7875 as a target if the rhs is just a call. This avoids an
7876 extra temp and copy and that prevents a partial-subsumption
7877 which makes bad code. Actually we could treat
7878 component_ref's of vars like vars. */
7880 tree lhs = TREE_OPERAND (exp, 0);
7881 tree rhs = TREE_OPERAND (exp, 1);
/* Check for |= or &= of a bitfield of size 1 into another bitfield
of size 1. In this case, (unless we need the result of the
7887 assignment) we can do this more efficiently with a
7888 test followed by an assignment, if necessary.
7890 ??? At this point, we can't get a BIT_FIELD_REF here. But if
7891 things change so we do, this code should be enhanced to
7894 && TREE_CODE (lhs) == COMPONENT_REF
7895 && (TREE_CODE (rhs) == BIT_IOR_EXPR
7896 || TREE_CODE (rhs) == BIT_AND_EXPR)
7897 && TREE_OPERAND (rhs, 0) == lhs
7898 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
7899 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
7900 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
7902 rtx label = gen_label_rtx ();
7904 do_jump (TREE_OPERAND (rhs, 1),
7905 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
7906 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
7907 expand_assignment (lhs, convert (TREE_TYPE (rhs),
7908 (TREE_CODE (rhs) == BIT_IOR_EXPR
7910 : integer_zero_node)),
7912 do_pending_stack_adjust ();
7917 temp = expand_assignment (lhs, rhs, ! ignore);
7923 if (!TREE_OPERAND (exp, 0))
7924 expand_null_return ();
7926 expand_return (TREE_OPERAND (exp, 0));
7930 if (modifier == EXPAND_STACK_PARM)
7932 /* If we are taking the address of something erroneous, just
7934 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
7936 /* If we are taking the address of a constant and are at the
7937 top level, we have to use output_constant_def since we can't
7938 call force_const_mem at top level. */
7940 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
7941 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
7943 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
7946 /* We make sure to pass const0_rtx down if we came in with
7947 ignore set, to avoid doing the cleanups twice for something. */
7948 op0 = expand_expr (TREE_OPERAND (exp, 0),
7949 ignore ? const0_rtx : NULL_RTX, VOIDmode,
7950 (modifier == EXPAND_INITIALIZER
7951 ? modifier : EXPAND_CONST_ADDRESS));
7953 /* If we are going to ignore the result, OP0 will have been set
7954 to const0_rtx, so just return it. Don't get confused and
7955 think we are taking the address of the constant. */
7959 /* We would like the object in memory. If it is a constant, we can
7960 have it be statically allocated into memory. For a non-constant,
7961 we need to allocate some memory and store the value into it. */
7963 if (CONSTANT_P (op0))
7964 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7966 else if (REG_P (op0) || GET_CODE (op0) == SUBREG
7967 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == PARALLEL
7968 || GET_CODE (op0) == LO_SUM)
7970 /* If this object is in a register, it can't be BLKmode. */
7971 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7972 rtx memloc = assign_temp (inner_type, 1, 1, 1);
7974 if (GET_CODE (op0) == PARALLEL)
7975 /* Handle calls that pass values in multiple
7976 non-contiguous locations. The Irix 6 ABI has examples
7978 emit_group_store (memloc, op0, inner_type,
7979 int_size_in_bytes (inner_type));
7981 emit_move_insn (memloc, op0);
7989 mark_temp_addr_taken (op0);
7990 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7992 op0 = XEXP (op0, 0);
7993 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
7994 op0 = convert_memory_address (ptr_mode, op0);
/* If OP0 is not aligned at least as much as the type requires, we
7999 need to make a temporary, copy OP0 to it, and take the address of
8000 the temporary. We want to use the alignment of the type, not of
8001 the operand. Note that this is incorrect for FUNCTION_TYPE, but
8002 the test for BLKmode means that can't happen. The test for
8003 BLKmode is because we never make mis-aligned MEMs with
8006 We don't need to do this at all if the machine doesn't have
8007 strict alignment. */
8008 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
8009 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
8011 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
8013 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8016 if (TYPE_ALIGN_OK (inner_type))
8019 if (TREE_ADDRESSABLE (inner_type))
8021 /* We can't make a bitwise copy of this object, so fail. */
8022 error ("cannot take the address of an unaligned member");
8026 new = assign_stack_temp_for_type
8027 (TYPE_MODE (inner_type),
8028 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
8029 : int_size_in_bytes (inner_type),
8030 1, build_qualified_type (inner_type,
8031 (TYPE_QUALS (inner_type)
8032 | TYPE_QUAL_CONST)));
8034 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
8035 (modifier == EXPAND_STACK_PARM
8036 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
8041 op0 = force_operand (XEXP (op0, 0), target);
8046 && modifier != EXPAND_CONST_ADDRESS
8047 && modifier != EXPAND_INITIALIZER
8048 && modifier != EXPAND_SUM)
8049 op0 = force_reg (Pmode, op0);
8052 && ! REG_USERVAR_P (op0))
8053 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8055 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
8056 op0 = convert_memory_address (ptr_mode, op0);
8060 /* COMPLEX type for Extended Pascal & Fortran */
8063 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8066 /* Get the rtx code of the operands. */
8067 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8068 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8071 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8075 /* Move the real (op0) and imaginary (op1) parts to their location. */
8076 emit_move_insn (gen_realpart (mode, target), op0);
8077 emit_move_insn (gen_imagpart (mode, target), op1);
8079 insns = get_insns ();
8082 /* Complex construction should appear as a single unit. */
8083 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8084 each with a separate pseudo as destination.
8085 It's not correct for flow to treat them as a unit. */
8086 if (GET_CODE (target) != CONCAT)
8087 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8095 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8096 return gen_realpart (mode, op0);
8099 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8100 return gen_imagpart (mode, op0);
8103 expand_resx_expr (exp);
8106 case TRY_CATCH_EXPR:
8108 case EH_FILTER_EXPR:
8109 case TRY_FINALLY_EXPR:
8110 /* Lowered by tree-eh.c. */
8113 case WITH_CLEANUP_EXPR:
8114 case CLEANUP_POINT_EXPR:
8116 case CASE_LABEL_EXPR:
8122 case PREINCREMENT_EXPR:
8123 case PREDECREMENT_EXPR:
8124 case POSTINCREMENT_EXPR:
8125 case POSTDECREMENT_EXPR:
8128 case LABELED_BLOCK_EXPR:
8129 case EXIT_BLOCK_EXPR:
8130 case TRUTH_ANDIF_EXPR:
8131 case TRUTH_ORIF_EXPR:
8132 /* Lowered by gimplify.c. */
8136 return get_exception_pointer (cfun);
8139 return get_exception_filter (cfun);
8142 /* Function descriptors are not valid except as
8143 initialization constants, and should not be expanded. */
8151 expand_label (TREE_OPERAND (exp, 0));
8155 expand_asm_expr (exp);
8158 case WITH_SIZE_EXPR:
8159 /* WITH_SIZE_EXPR expands to its first argument. The caller should
8160 have pulled out the size to use in whatever context it needed. */
8161 return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode,
8162 modifier, alt_rtl);
8165 return lang_hooks.expand_expr (exp, original_target, tmode,
8166 modifier, alt_rtl);
8169 /* Here to do an ordinary binary operator. */
8171 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8172 subtarget, &op0, &op1, 0);
8174 this_optab = optab_for_tree_code (code, type);
8176 if (modifier == EXPAND_STACK_PARM)
8177 target = 0;
8178 temp = expand_binop (mode, this_optab, op0, op1, target,
8179 unsignedp, OPTAB_LIB_WIDEN);
8182 return REDUCE_BIT_FIELD (temp);
8184 #undef REDUCE_BIT_FIELD
8186 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
8187 signedness of TYPE), possibly returning the result in TARGET. */
8188 static rtx
8189 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
8191 HOST_WIDE_INT prec = TYPE_PRECISION (type);
8192 if (target && GET_MODE (target) != GET_MODE (exp))
8194 if (TYPE_UNSIGNED (type))
8197 if (prec < HOST_BITS_PER_WIDE_INT)
8198 mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
8199 GET_MODE (exp));
8200 else
8201 mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
8202 ((unsigned HOST_WIDE_INT) 1
8203 << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
8204 GET_MODE (exp));
8205 return expand_and (GET_MODE (exp), exp, mask, target);
8209 tree count = build_int_2 (GET_MODE_BITSIZE (GET_MODE (exp)) - prec, 0);
8210 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
8211 return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
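/* Illustrative sketch (not used by the compiler): the same reduction
   on a plain HOST_WIDE_INT, assuming 0 < PREC < HOST_BITS_PER_WIDE_INT
   and an arithmetic right shift of signed values, which GCC itself
   relies on.  Unsigned types mask with (1 << PREC) - 1; signed types
   left-align the low PREC bits and shift back to sign-extend.  */

static inline HOST_WIDE_INT
reduce_precision_sketch (HOST_WIDE_INT x, int prec, int unsignedp)
{
  int shift = HOST_BITS_PER_WIDE_INT - prec;

  if (unsignedp)
    return x & ((((HOST_WIDE_INT) 1) << prec) - 1);

  /* Shift left as unsigned to avoid overflow, then shift right
     arithmetically to duplicate the sign bit of the PREC-bit field.  */
  return (HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) x << shift) >> shift;
}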
8215 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
8216 when applied to the address of EXP produces an address known to be
8217 aligned more than BIGGEST_ALIGNMENT. */
8219 static int
8220 is_aligning_offset (tree offset, tree exp)
8222 /* Strip off any conversions. */
8223 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8224 || TREE_CODE (offset) == NOP_EXPR
8225 || TREE_CODE (offset) == CONVERT_EXPR)
8226 offset = TREE_OPERAND (offset, 0);
8228 /* We must now have a BIT_AND_EXPR with a constant that is one less than
8229 a power of 2 and which is larger than BIGGEST_ALIGNMENT / BITS_PER_UNIT. */
8230 if (TREE_CODE (offset) != BIT_AND_EXPR
8231 || !host_integerp (TREE_OPERAND (offset, 1), 1)
8232 || compare_tree_int (TREE_OPERAND (offset, 1),
8233 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
8234 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
8237 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
8238 It must be NEGATE_EXPR. Then strip any more conversions. */
8239 offset = TREE_OPERAND (offset, 0);
8240 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8241 || TREE_CODE (offset) == NOP_EXPR
8242 || TREE_CODE (offset) == CONVERT_EXPR)
8243 offset = TREE_OPERAND (offset, 0);
8245 if (TREE_CODE (offset) != NEGATE_EXPR)
8248 offset = TREE_OPERAND (offset, 0);
8249 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8250 || TREE_CODE (offset) == NOP_EXPR
8251 || TREE_CODE (offset) == CONVERT_EXPR)
8252 offset = TREE_OPERAND (offset, 0);
8254 /* This must now be the address of EXP. */
8255 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
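/* For example, the tests above match the OFFSET produced for the
   classic alignment idiom

     offset = -address & (ALIGN - 1);

   where ALIGN is a power of 2 whose mask ALIGN - 1 exceeds
   BIGGEST_ALIGNMENT / BITS_PER_UNIT: adding such an OFFSET to the
   address of EXP rounds it up to a multiple of ALIGN. */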
8258 /* Return the tree node if ARG corresponds to a string constant, or zero
8259 if it does not. If we return nonzero, set *PTR_OFFSET to the offset
8260 in bytes within the string that ARG is accessing. The type of the
8261 offset will be `sizetype'. */
8264 string_constant (tree arg, tree *ptr_offset)
8268 if (TREE_CODE (arg) == ADDR_EXPR
8269 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8271 *ptr_offset = size_zero_node;
8272 return TREE_OPERAND (arg, 0);
8274 if (TREE_CODE (arg) == ADDR_EXPR
8275 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
8276 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg, 0), 0)) == STRING_CST)
8278 *ptr_offset = convert (sizetype, TREE_OPERAND (TREE_OPERAND (arg, 0), 1));
8279 return TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
8281 else if (TREE_CODE (arg) == PLUS_EXPR)
8283 tree arg0 = TREE_OPERAND (arg, 0);
8284 tree arg1 = TREE_OPERAND (arg, 1);
8289 if (TREE_CODE (arg0) == ADDR_EXPR
8290 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
8292 *ptr_offset = convert (sizetype, arg1);
8293 return TREE_OPERAND (arg0, 0);
8295 else if (TREE_CODE (arg1) == ADDR_EXPR
8296 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
8298 *ptr_offset = convert (sizetype, arg0);
8299 return TREE_OPERAND (arg1, 0);
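/* For example, for ARG == &"hello"[2], or the equivalent "hello" + 2,
   the STRING_CST "hello" is returned and *PTR_OFFSET is set to the
   sizetype constant 2. */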
8306 /* Generate code to calculate EXP using a store-flag instruction
8307 and return an rtx for the result. EXP is either a comparison
8308 or a TRUTH_NOT_EXPR whose operand is a comparison.
8310 If TARGET is nonzero, store the result there if convenient.
8312 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
8313 cheap.
8315 Return zero if there is no suitable set-flag instruction
8316 available on this machine.
8318 Once expand_expr has been called on the arguments of the comparison,
8319 we are committed to doing the store flag, since it is not safe to
8320 re-evaluate the expression. We emit the store-flag insn by calling
8321 emit_store_flag, but only expand the arguments if we have a reason
8322 to believe that emit_store_flag will be successful. If we think that
8323 it will, but it isn't, we have to simulate the store-flag with a
8324 set/jump/set sequence. */
8326 static rtx
8327 do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
8330 tree arg0, arg1, type;
8332 enum machine_mode operand_mode;
8336 enum insn_code icode;
8337 rtx subtarget = target;
8340 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
8341 result at the end. We can't simply invert the test since it would
8342 have already been inverted if it were valid. This case occurs for
8343 some floating-point comparisons. */
8345 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
8346 invert = 1, exp = TREE_OPERAND (exp, 0);
8348 arg0 = TREE_OPERAND (exp, 0);
8349 arg1 = TREE_OPERAND (exp, 1);
8351 /* Don't crash if the comparison was erroneous. */
8352 if (arg0 == error_mark_node || arg1 == error_mark_node)
8355 type = TREE_TYPE (arg0);
8356 operand_mode = TYPE_MODE (type);
8357 unsignedp = TYPE_UNSIGNED (type);
8359 /* We won't bother with BLKmode store-flag operations because it would mean
8360 passing a lot of information to emit_store_flag. */
8361 if (operand_mode == BLKmode)
8364 /* We won't bother with store-flag operations involving function pointers
8365 when function pointers must be canonicalized before comparisons. */
8366 #ifdef HAVE_canonicalize_funcptr_for_compare
8367 if (HAVE_canonicalize_funcptr_for_compare
8368 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
8369 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8371 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
8372 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8373 == FUNCTION_TYPE))))
8380 /* Get the rtx comparison code to use. We know that EXP is a comparison
8381 operation of some type. Some comparisons against 1 and -1 can be
8382 converted to comparisons with zero. Do so here so that the tests
8383 below will be aware that we have a comparison with zero. These
8384 tests will not catch constants in the first operand, but constants
8385 are rarely passed as the first operand. */
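/* For example, the signed test "x < 1" becomes "x <= 0", and the
   signed test "x > -1" becomes "x >= 0". */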
8387 switch (TREE_CODE (exp))
8396 if (integer_onep (arg1))
8397 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
8399 code = unsignedp ? LTU : LT;
8402 if (! unsignedp && integer_all_onesp (arg1))
8403 arg1 = integer_zero_node, code = LT;
8405 code = unsignedp ? LEU : LE;
8408 if (! unsignedp && integer_all_onesp (arg1))
8409 arg1 = integer_zero_node, code = GE;
8411 code = unsignedp ? GTU : GT;
8414 if (integer_onep (arg1))
8415 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
8417 code = unsignedp ? GEU : GE;
8420 case UNORDERED_EXPR:
8449 /* Put a constant second. */
8450 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
8452 tem = arg0; arg0 = arg1; arg1 = tem;
8453 code = swap_condition (code);
8456 /* If this is an equality or inequality test of a single bit, we can
8457 do this by shifting the bit being tested to the low-order bit and
8458 masking the result with the constant 1. If the condition was EQ,
8459 we xor it with 1. This does not require an scc insn and is faster
8460 than an scc insn even if we have it.
8462 The code to make this transformation was moved into fold_single_bit_test,
8463 so we just call into the folder and expand its result. */
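/* For example, "(x & 8) != 0" becomes "(x >> 3) & 1", and
   "(x & 8) == 0" becomes "((x >> 3) & 1) ^ 1". */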
8465 if ((code == NE || code == EQ)
8466 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
8467 && integer_pow2p (TREE_OPERAND (arg0, 1)))
8469 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
8470 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
8471 arg0, arg1, type),
8472 target, VOIDmode, EXPAND_NORMAL);
8475 /* Now see if we are likely to be able to do this. Return if not. */
8476 if (! can_compare_p (code, operand_mode, ccp_store_flag))
8479 icode = setcc_gen_code[(int) code];
8480 if (icode == CODE_FOR_nothing
8481 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
8483 /* We can only do this if it is one of the special cases that
8484 can be handled without an scc insn. */
8485 if ((code == LT && integer_zerop (arg1))
8486 || (! only_cheap && code == GE && integer_zerop (arg1)))
8488 else if (BRANCH_COST >= 0
8489 && ! only_cheap && (code == NE || code == EQ)
8490 && TREE_CODE (type) != REAL_TYPE
8491 && ((abs_optab->handlers[(int) operand_mode].insn_code
8492 != CODE_FOR_nothing)
8493 || (ffs_optab->handlers[(int) operand_mode].insn_code
8494 != CODE_FOR_nothing)))
8500 if (! get_subtarget (target)
8501 || GET_MODE (subtarget) != operand_mode)
8504 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
8507 target = gen_reg_rtx (mode);
8509 result = emit_store_flag (target, code, op0, op1,
8510 operand_mode, unsignedp, 1);
8515 result = expand_binop (mode, xor_optab, result, const1_rtx,
8516 result, 0, OPTAB_LIB_WIDEN);
8520 /* If this failed, we have to do this with set/compare/jump/set code. */
8522 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
8523 target = gen_reg_rtx (GET_MODE (target));
8525 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
8526 result = compare_from_rtx (op0, op1, code, unsignedp,
8527 operand_mode, NULL_RTX);
8528 if (GET_CODE (result) == CONST_INT)
8529 return (((result == const0_rtx && ! invert)
8530 || (result != const0_rtx && invert))
8531 ? const0_rtx : const1_rtx);
8533 /* The code of RESULT may not match CODE if compare_from_rtx
8534 decided to swap its operands and reverse the original code.
8536 We know that compare_from_rtx returns either a CONST_INT or
8537 a new comparison code, so it is safe to just extract the
8538 code from RESULT. */
8539 code = GET_CODE (result);
8541 label = gen_label_rtx ();
8542 if (bcc_gen_fctn[(int) code] == 0)
8545 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
8546 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
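/* Schematically, the fallback emits the equivalent of

     target = 1;   (or 0 when INVERT)
     if (op0 <code> op1)
       goto label;
     target = 0;   (or 1 when INVERT)
   label:;

   so the flag value ends up in TARGET without a store-flag insn. */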
8553 /* Stubs in case we haven't got a casesi insn. */
8555 # define HAVE_casesi 0
8556 # define gen_casesi(a, b, c, d, e) (0)
8557 # define CODE_FOR_casesi CODE_FOR_nothing
8560 /* If the machine does not have a case insn that compares the bounds,
8561 this means extra overhead for dispatch tables, which raises the
8562 threshold for using them. */
8563 #ifndef CASE_VALUES_THRESHOLD
8564 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
8565 #endif /* CASE_VALUES_THRESHOLD */
8568 case_values_threshold (void)
8570 return CASE_VALUES_THRESHOLD;
8573 /* Attempt to generate a casesi instruction. Returns 1 if successful,
8574 0 otherwise (i.e. if there is no casesi instruction). */
8576 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
8577 rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
8579 enum machine_mode index_mode = SImode;
8580 int index_bits = GET_MODE_BITSIZE (index_mode);
8581 rtx op1, op2, index;
8582 enum machine_mode op_mode;
8587 /* Convert the index to SImode. */
8588 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
8590 enum machine_mode omode = TYPE_MODE (index_type);
8591 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
8593 /* We must handle the endpoints in the original mode. */
8594 index_expr = build2 (MINUS_EXPR, index_type,
8595 index_expr, minval);
8596 minval = integer_zero_node;
8597 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
8598 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
8599 omode, 1, default_label);
8600 /* Now we can safely truncate. */
8601 index = convert_to_mode (index_mode, index, 0);
8605 if (TYPE_MODE (index_type) != index_mode)
8607 index_expr = convert (lang_hooks.types.type_for_size
8608 (index_bits, 0), index_expr);
8609 index_type = TREE_TYPE (index_expr);
8612 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
8615 do_pending_stack_adjust ();
8617 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
8618 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
8619 (index, op_mode))
8620 index = copy_to_mode_reg (op_mode, index);
8622 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
8624 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
8625 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
8626 op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
8627 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
8628 (op1, op_mode))
8629 op1 = copy_to_mode_reg (op_mode, op1);
8631 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
8633 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
8634 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
8635 op2, TYPE_UNSIGNED (TREE_TYPE (range)));
8636 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
8637 (op2, op_mode))
8638 op2 = copy_to_mode_reg (op_mode, op2);
8640 emit_jump_insn (gen_casesi (index, op1, op2,
8641 table_label, default_label));
8645 /* Attempt to generate a tablejump instruction; same concept. */
8646 #ifndef HAVE_tablejump
8647 #define HAVE_tablejump 0
8648 #define gen_tablejump(x, y) (0)
8651 /* Subroutine of the next function.
8653 INDEX is the value being switched on, with the lowest value
8654 in the table already subtracted.
8655 MODE is its expected mode (needed if INDEX is constant).
8656 RANGE is the length of the jump table.
8657 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
8659 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
8660 index value is out of range. */
8662 static void
8663 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
8664 rtx default_label)
8668 if (INTVAL (range) > cfun->max_jumptable_ents)
8669 cfun->max_jumptable_ents = INTVAL (range);
8671 /* Do an unsigned comparison (in the proper mode) between the index
8672 expression and the value which represents the length of the range.
8673 Since we just finished subtracting the lower bound of the range
8674 from the index expression, this comparison allows us to simultaneously
8675 check that the original index expression value is both greater than
8676 or equal to the minimum value of the range and less than or equal to
8677 the maximum value of the range. */
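/* For example, with a case range of [5, 10] the lower bound 5 has
   already been subtracted, so the single unsigned test
   (unsigned) (index - 5) > 5 rejects both index < 5, which wrapped
   around to a huge value, and index > 10. */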
8679 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
8680 default_label);
8682 /* If index is in range, it must fit in Pmode.
8683 Convert to Pmode so we can index with it. */
8685 index = convert_to_mode (Pmode, index, 1);
8687 /* Don't let a MEM slip through, because then INDEX that comes
8688 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
8689 and break_out_memory_refs will go to work on it and mess it up. */
8690 #ifdef PIC_CASE_VECTOR_ADDRESS
8691 if (flag_pic && !REG_P (index))
8692 index = copy_to_mode_reg (Pmode, index);
8695 /* If flag_force_addr were to affect this address
8696 it could interfere with the tricky assumptions made
8697 about addresses that contain label-refs,
8698 which may be valid only very near the tablejump itself. */
8699 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
8700 GET_MODE_SIZE, because this indicates how large insns are. The other
8701 uses should all be Pmode, because they are addresses. This code
8702 could fail if addresses and insns are not the same size. */
8703 index = gen_rtx_PLUS (Pmode,
8704 gen_rtx_MULT (Pmode, index,
8705 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
8706 gen_rtx_LABEL_REF (Pmode, table_label));
8707 #ifdef PIC_CASE_VECTOR_ADDRESS
8709 index = PIC_CASE_VECTOR_ADDRESS (index);
8712 index = memory_address_noforce (CASE_VECTOR_MODE, index);
8713 temp = gen_reg_rtx (CASE_VECTOR_MODE);
8714 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
8715 RTX_UNCHANGING_P (vector) = 1;
8716 MEM_NOTRAP_P (vector) = 1;
8717 convert_move (temp, vector, 0);
8719 emit_jump_insn (gen_tablejump (temp, table_label));
8721 /* If we are generating PIC code or if the table is PC-relative, the
8722 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
8723 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
8728 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
8729 rtx table_label, rtx default_label)
8733 if (! HAVE_tablejump)
8736 index_expr = fold (build2 (MINUS_EXPR, index_type,
8737 convert (index_type, index_expr),
8738 convert (index_type, minval)));
8739 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
8740 do_pending_stack_adjust ();
8742 do_tablejump (index, TYPE_MODE (index_type),
8743 convert_modes (TYPE_MODE (index_type),
8744 TYPE_MODE (TREE_TYPE (range)),
8745 expand_expr (range, NULL_RTX,
8746 VOIDmode, 0),
8747 TYPE_UNSIGNED (TREE_TYPE (range))),
8748 table_label, default_label);
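/* Illustrative sketch (hypothetical helper, not part of the compiler):
   what the emitted tablejump amounts to at the source level, written
   with GNU C's computed goto.  The range check mirrors do_tablejump's
   single unsigned comparison against the table length. */

static int
tablejump_sketch (unsigned int index)
{
  /* The jump table: one label address per case value.  */
  static const void *const table[3] = { &&case0, &&case1, &&case2 };

  if (index > 2)
    goto use_default;		/* Out of range: take the default.  */
  goto *table[index];		/* Indexed load plus indirect jump.  */

 case0: return 10;
 case1: return 20;
 case2: return 30;
 use_default: return -1;
}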
8752 /* Nonzero if the mode is a valid vector mode for this architecture.
8753 This returns nonzero even if there is no hardware support for the
8754 vector mode, but we can emulate with narrower modes. */
8757 vector_mode_valid_p (enum machine_mode mode)
8759 enum mode_class class = GET_MODE_CLASS (mode);
8760 enum machine_mode innermode;
8762 /* Doh! What's going on? */
8763 if (class != MODE_VECTOR_INT
8764 && class != MODE_VECTOR_FLOAT)
8767 /* Hardware support. Woo hoo! */
8768 if (VECTOR_MODE_SUPPORTED_P (mode))
8771 innermode = GET_MODE_INNER (mode);
8773 /* We should probably return 1 if requesting V4DI when we have no DI
8774 but do have V2DI; that case, however, is probably very unlikely. */
8776 /* If we have support for the inner mode, we can safely emulate it.
8777 We may not have V2DI, but we can emulate it with a pair of DIs. */
8778 return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
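/* Illustrative sketch (hypothetical types, not part of the compiler):
   emulating a vector mode with its narrower inner mode means
   open-coding one scalar operation per element, e.g. two DImode
   operations standing in for a V2DI one. */

struct v2di_sketch { long long lane[2]; };

static struct v2di_sketch
v2di_add_sketch (struct v2di_sketch a, struct v2di_sketch b)
{
  struct v2di_sketch r;
  r.lane[0] = a.lane[0] + b.lane[0];	/* First DImode lane.  */
  r.lane[1] = a.lane[1] + b.lane[1];	/* Second DImode lane.  */
  return r;
}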
8781 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
8783 const_vector_from_tree (tree exp)
8788 enum machine_mode inner, mode;
8790 mode = TYPE_MODE (TREE_TYPE (exp));
8792 if (initializer_zerop (exp))
8793 return CONST0_RTX (mode);
8795 units = GET_MODE_NUNITS (mode);
8796 inner = GET_MODE_INNER (mode);
8798 v = rtvec_alloc (units);
8800 link = TREE_VECTOR_CST_ELTS (exp);
8801 for (i = 0; link; link = TREE_CHAIN (link), ++i)
8803 elt = TREE_VALUE (link);
8805 if (TREE_CODE (elt) == REAL_CST)
8806 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
8807 inner);
8808 else
8809 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
8810 TREE_INT_CST_HIGH (elt),
8811 inner);
8814 /* Initialize remaining elements to 0. */
8815 for (; i < units; ++i)
8816 RTVEC_ELT (v, i) = CONST0_RTX (inner);
8818 return gen_rtx_raw_CONST_VECTOR (mode, v);
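/* For example, a V4SI VECTOR_CST whose element chain supplies only
   {1, 2} yields the CONST_VECTOR [1, 2, 0, 0]: the explicit elements
   first, then the zero padding added by the loop above. */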
8820 #include "gt-expr.h"