/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "typeclass.h"
#include "langhooks.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "tree-flow.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
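
/* Example: with STACK_GROWS_DOWNWARD, a one-operand push of REG in mode
   MODE can be emitted as a store through STACK_PUSH_CODE applied to the
   stack pointer.  A minimal sketch, kept out of the build; REG is a
   placeholder:  */
#if 0
static void
example_single_push (enum machine_mode mode, rtx reg)
{
  /* Generates (set (mem:MODE (pre_dec:P (reg sp))) (reg:MODE n)).  */
  rtx dst = gen_rtx_MEM (mode, gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode,
					      stack_pointer_rtx));
  emit_move_insn (dst, reg);
}
#endif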
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};
/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */
struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
  void *constfundata;
  int reverse;
};
static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
						     unsigned int,
						     unsigned int);
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
			      struct move_by_pieces *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned);
static rtx emit_block_move_via_libcall (rtx, rtx, rtx, bool);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
			       struct store_by_pieces *);
static rtx clear_storage_via_libcall (rtx, rtx, bool);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
				     HOST_WIDE_INT, enum machine_mode,
				     tree, tree, int, int);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
			tree, tree, int);
static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);
static int is_aligning_offset (tree, tree);
static void expand_operands (tree, tree, rtx, rtx*, rtx*,
			     enum expand_modifier);
static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
static rtx do_store_flag (tree, rtx, enum machine_mode, int);
#ifdef PUSH_ROUNDING
static void emit_single_push_insn (enum machine_mode, rtx, tree);
#endif
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
static rtx const_vector_from_tree (tree);
static void write_complex_part (rtx, rtx, bool);
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO)
#endif

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) CLEAR_RATIO)
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero, or
   to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO)
#endif
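
/* Example: the usual pattern at a call site is to consult the predicate
   and fall back to a general block move otherwise.  A hedged sketch; DST
   and SRC are placeholder MEMs, and the size 32 with 32-bit alignment is
   an arbitrary illustration:  */
#if 0
  if (MOVE_BY_PIECES_P (32, 32))
    move_by_pieces (dst, src, 32, 32, /*endp=*/0);
  else
    emit_block_move (dst, src, GEN_INT (32), BLOCK_OP_NORMAL);
#endif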
198 /* This array records the insn_code of insns to perform block moves. */
199 enum insn_code movmem_optab[NUM_MACHINE_MODES];
201 /* This array records the insn_code of insns to perform block sets. */
202 enum insn_code setmem_optab[NUM_MACHINE_MODES];
204 /* These arrays record the insn_code of three different kinds of insns
205 to perform block compares. */
206 enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
207 enum insn_code cmpstrn_optab[NUM_MACHINE_MODES];
208 enum insn_code cmpmem_optab[NUM_MACHINE_MODES];
210 /* Synchronization primitives. */
211 enum insn_code sync_add_optab[NUM_MACHINE_MODES];
212 enum insn_code sync_sub_optab[NUM_MACHINE_MODES];
213 enum insn_code sync_ior_optab[NUM_MACHINE_MODES];
214 enum insn_code sync_and_optab[NUM_MACHINE_MODES];
215 enum insn_code sync_xor_optab[NUM_MACHINE_MODES];
216 enum insn_code sync_nand_optab[NUM_MACHINE_MODES];
217 enum insn_code sync_old_add_optab[NUM_MACHINE_MODES];
218 enum insn_code sync_old_sub_optab[NUM_MACHINE_MODES];
219 enum insn_code sync_old_ior_optab[NUM_MACHINE_MODES];
220 enum insn_code sync_old_and_optab[NUM_MACHINE_MODES];
221 enum insn_code sync_old_xor_optab[NUM_MACHINE_MODES];
222 enum insn_code sync_old_nand_optab[NUM_MACHINE_MODES];
223 enum insn_code sync_new_add_optab[NUM_MACHINE_MODES];
224 enum insn_code sync_new_sub_optab[NUM_MACHINE_MODES];
225 enum insn_code sync_new_ior_optab[NUM_MACHINE_MODES];
226 enum insn_code sync_new_and_optab[NUM_MACHINE_MODES];
227 enum insn_code sync_new_xor_optab[NUM_MACHINE_MODES];
228 enum insn_code sync_new_nand_optab[NUM_MACHINE_MODES];
229 enum insn_code sync_compare_and_swap[NUM_MACHINE_MODES];
230 enum insn_code sync_compare_and_swap_cc[NUM_MACHINE_MODES];
231 enum insn_code sync_lock_test_and_set[NUM_MACHINE_MODES];
232 enum insn_code sync_lock_release[NUM_MACHINE_MODES];
/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
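
/* Example: the mode-selection loops below use this predicate to cap the
   alignment they will assume.  A minimal sketch:  */
#if 0
  /* On a strict-alignment target, reject word-sized pieces when only
     byte alignment is known, and fall back to QImode.  */
  if (SLOW_UNALIGNED_ACCESS (word_mode, BITS_PER_UNIT))
    mode = QImode;
#endif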
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once (void)
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    REGNO (reg) = regno;

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
	   srcmode = GET_MODE_WIDER_MODE (srcmode))
	{
	  enum insn_code ic;

	  ic = can_extend_p (mode, srcmode, 0);
	  if (ic == CODE_FOR_nothing)
	    continue;

	  PUT_MODE (mem, srcmode);

	  if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
	    float_extend_from_mem[mode][srcmode] = true;
	}
    }
}
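
/* Example: once the tables above are filled in, later expansion code can
   consult them directly.  A hedged sketch:  */
#if 0
  /* Can an SFmode value be loaded straight from memory into a register,
     and can SFmode memory be float-extended to DFmode in one insn?  */
  if (direct_load[(int) SFmode] && float_extend_from_mem[DFmode][SFmode])
    /* ... safe to emit (set (reg:DF x) (float_extend:DF (mem:SF a))) ...  */;
#endif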
/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (rtx to, rtx from, int unsignedp)
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
			      : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));

  gcc_assert (to_real == from_real);

  /* If the source and destination are already the same, then there's
     nothing to do.  */
  if (to == from)
    return;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));

      if (VECTOR_MODE_P (to_mode))
	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }

  if (to_real)
    {
      rtx value, insns;
      convert_optab tab;

      gcc_assert (GET_MODE_PRECISION (from_mode)
		  != GET_MODE_PRECISION (to_mode));

      if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
	tab = sext_optab;
      else
	tab = trunc_optab;

      /* Try converting directly if the insn is supported.  */

      code = tab->handlers[to_mode][from_mode].insn_code;
      if (code != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from,
			  tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
	  return;
	}

      /* Otherwise use a libcall.  */
      libcall = tab->handlers[to_mode][from_mode].libfunc;

      /* Is this conversion implemented yet?  */
      gcc_assert (libcall);

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
				       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value,
			  tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
								       from)
			  : gen_rtx_FLOAT_EXTEND (to_mode, from));
      return;
    }

  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);

      gcc_assert (trunc_optab->handlers[to_mode][full_mode].insn_code
		  != CODE_FOR_nothing);

      if (full_mode != from_mode)
	from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
		      to, from, UNKNOWN);
      return;
    }
  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
    {
      rtx new_from;
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);

      gcc_assert (sext_optab->handlers[full_mode][from_mode].insn_code
		  != CODE_FOR_nothing);

      if (to_mode == full_mode)
	{
	  emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
			  to, from, UNKNOWN);
	  return;
	}

      new_from = gen_reg_rtx (full_mode);
      emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
		      new_from, from, UNKNOWN);

      /* else proceed to integer conversions below.  */
      from_mode = full_mode;
      from = new_from;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (REG_P (to))
	    {
	      if (reg_overlap_mentioned_p (to, from))
		from = force_reg (from_mode, from);
	      emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	    }
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  gcc_assert (subword);

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;
	  rtx tmp;
	  tree shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_cst (NULL_TREE,
					GET_MODE_BITSIZE (to_mode)
					- GET_MODE_BITSIZE (from_mode));
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }

  /* Support special truncate insns for certain modes.  */
  if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
    {
      emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
		      to, from, UNKNOWN);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  gcc_unreachable ();
}
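
/* Example: a minimal use of convert_move, widening a pseudo from SImode
   to DImode with zero-extension.  A hedged sketch; NARROW and WIDE are
   placeholders:  */
#if 0
  rtx narrow = gen_reg_rtx (SImode);
  rtx wide = gen_reg_rtx (DImode);
  /* ... NARROW gets a value ...  */
  convert_move (wide, narrow, /*unsignedp=*/1);
#endif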
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.  */

rtx
convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */

rtx
convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (REG_P (x)
			  && (! HARD_REGISTER_P (x)
			      || HARD_REGNO_MODE_OK (REGNO (x), mode))
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return gen_int_mode (val, mode);
	}

      return gen_lowpart (mode, x);
    }

  /* Converting from integer constant into mode is always equivalent to a
     subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    {
      gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
      return simplify_gen_subreg (mode, x, oldmode, 0);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
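
/* Example: convert_to_mode is the usual entry point.  A hedged sketch,
   normalizing a VOIDmode CONST_INT to QImode:  */
#if 0
  rtx byte = convert_to_mode (QImode, GEN_INT (0x12), /*unsignedp=*/1);
#endif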
/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))

/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

int
can_move_by_pieces (unsigned HOST_WIDE_INT len,
		    unsigned int align ATTRIBUTE_UNUSED)
{
  return MOVE_BY_PIECES_P (len, align);
}
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */

rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
		unsigned int align, int endp)
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align, max_size) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
	   tmode != VOIDmode;
	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
	    || SLOW_UNALIGNED_ACCESS (tmode, align))
	  break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  gcc_assert (!data.len);

  if (endp)
    {
      rtx to1;

      gcc_assert (!data.reverse);
      if (data.autinc_to)
	{
	  if (endp == 2)
	    {
	      if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
		emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
	      else
		data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
								-1));
	    }
	  to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
					   data.offset);
	}
      else
	{
	  if (endp == 2)
	    --data.offset;
	  to1 = adjust_address (data.to, QImode, data.offset);
	}
      return to1;
    }
  else
    return data.to;
}
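
/* Example: a direct call copying 9 bytes known to be 32-bit aligned; on
   a 32-bit target this typically expands to two SImode moves plus one
   QImode move.  A hedged sketch; DST and SRC are placeholder MEMs:  */
#if 0
  rtx end = move_by_pieces (dst, src, 9, 32, /*endp=*/1);
  /* END now addresses the byte just past the copy, ala mempcpy.  */
#endif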
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
		       unsigned int max_size)
{
  unsigned HOST_WIDE_INT n_insns = 0;
  enum machine_mode tmode;

  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode tmode, xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
	   tmode != VOIDmode;
	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
	    || SLOW_UNALIGNED_ACCESS (tmode, align))
	  break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  gcc_assert (!l);
  return n_insns;
}
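
/* Worked example: L == 9 with 32-bit alignment and MAX_SIZE == 5 on a
   32-bit target counts 9/4 == 2 SImode insns (leaving L == 1) plus one
   QImode insn, so the function returns 3.  */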
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
		  struct move_by_pieces *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->to)
	{
	  if (data->autinc_to)
	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
					     data->offset);
	  else
	    to1 = adjust_address (data->to, mode, data->offset);
	}

      if (data->autinc_from)
	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
					   data->offset);
      else
	from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
	emit_insn ((*genfun) (to1, from1));
      else
	{
#ifdef PUSH_ROUNDING
	  emit_single_push_insn (mode, from1, NULL);
#else
	  gcc_unreachable ();
#endif
	}

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
    case BLOCK_OP_TAILCALL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
	 to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      gcc_unreachable ();
    }

  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  gcc_assert (MEM_P (x));
  gcc_assert (MEM_P (y));
  gcc_assert (size);

  /* Make sure we've got BLKmode addresses; store_one_arg can decide that
     block copy is more efficient for other large modes, e.g. DCmode.  */
  x = adjust_address (x, BLKmode, 0);
  y = adjust_address (y, BLKmode, 0);

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (GET_CODE (size) == CONST_INT)
    {
      if (INTVAL (size) == 0)
	return 0;

      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, size);
      set_mem_size (y, size);
    }

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movmem (x, y, size, align))
    ;
  else if (may_use_call)
    retval = emit_block_move_via_libcall (x, y, size,
					  method == BLOCK_OP_TAILCALL);
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}
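
/* Example: the typical call copies one aggregate into another.  A minimal
   sketch, assuming DST and SRC are placeholder BLKmode MEMs for objects of
   tree type TYPE:  */
#if 0
  rtx bytes = GEN_INT (int_size_in_bytes (type));
  emit_block_move (dst, src, bytes, BLOCK_OP_NORMAL);
#endif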
/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
  /* If arguments are pushed on the stack, then they're safe.  */
  if (PUSH_ARGS)
    return true;

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
  {
    tree fn = emit_block_move_libcall_fn (false);
    (void) fn;
    if (REG_PARM_STACK_SPACE (fn) != 0)
      return false;
  }
#endif

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
  {
    CUMULATIVE_ARGS args_so_far;
    tree fn, arg;

    fn = emit_block_move_libcall_fn (false);
    INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);

    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
      {
	enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
	rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
	if (!tmp || !REG_P (tmp))
	  return false;
	if (targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL, 1))
	  return false;
	FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
      }
  }
  return true;
}
/* A subroutine of emit_block_move.  Expand a movmem pattern;
   return true if successful.  */

static bool
emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align)
{
  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  int save_volatile_ok = volatile_ok;
  enum machine_mode mode;

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = movmem_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
	     here because if SIZE is less than the mode mask, as it is
	     returned by the macro, it will definitely be less than the
	     actual mode mask.  */
	  && ((GET_CODE (size) == CONST_INT
	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
		   <= (GET_MODE_MASK (mode) >> 1)))
	      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
	      || (*pred) (x, BLKmode))
	  && ((pred = insn_data[(int) code].operand[1].predicate) == 0
	      || (*pred) (y, BLKmode))
	  && ((pred = insn_data[(int) code].operand[3].predicate) == 0
	      || (*pred) (opalign, VOIDmode)))
	{
	  rtx op2;
	  rtx last = get_last_insn ();
	  rtx pat;

	  op2 = convert_to_mode (mode, size, 1);
	  pred = insn_data[(int) code].operand[2].predicate;
	  if (pred != 0 && ! (*pred) (op2, mode))
	    op2 = copy_to_mode_reg (mode, op2);

	  /* ??? When called via emit_block_move_for_call, it'd be
	     nice if there were some way to inform the backend, so
	     that it doesn't fail the expansion because it thinks
	     emitting the libcall would be more efficient.  */

	  pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	  if (pat)
	    {
	      emit_insn (pat);
	      volatile_ok = save_volatile_ok;
	      return true;
	    }
	  else
	    delete_insns_since (last);
	}
    }

  volatile_ok = save_volatile_ok;
  return false;
}
/* A subroutine of emit_block_move.  Expand a call to memcpy.
   Return the return value from memcpy, 0 otherwise.  */

static rtx
emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
{
  rtx dst_addr, src_addr;
  tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* Emit code to copy the addresses of DST and SRC and SIZE into new
     pseudos.  We can then place those new pseudos into a VAR_DECL and
     use them later.  */

  dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));

  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  size_mode = TYPE_MODE (sizetype);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  size_tree = make_tree (sizetype, size);

  fn = emit_block_move_libcall_fn (true);
  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
  arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
  arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);

  /* Now we have to build up the CALL_EXPR itself.  */
  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
  call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
		      call_expr, arg_list, NULL_TREE);
  CALL_EXPR_TAILCALL (call_expr) = tailcall;

  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);

  return retval;
}
/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_move_fn;

void
init_block_move_fn (const char *asmspec)
{
  if (!block_move_fn)
    {
      tree args, fn;

      fn = get_identifier ("memcpy");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
				       const_ptr_type_node, sizetype,
				       NULL_TREE);

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;

      block_move_fn = fn;
    }

  if (asmspec)
    set_user_assembler_name (block_move_fn, asmspec);
}

static tree
emit_block_move_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_move_fn)
    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_move_fn);
      assemble_external (block_move_fn);
    }

  return block_move_fn;
}
/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

static void
emit_block_move_via_loop (rtx x, rtx y, rtx size,
			  unsigned int align ATTRIBUTE_UNUSED)
{
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (Pmode, iter_mode, iter, true);
  x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
  y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
			     true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
			   true, top_label);
}
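
/* The loop emitted above behaves like the following C, with X and Y
   viewed as byte arrays (illustrative only):

     for (i = 0; i < size; i++)
       x[i] = y[i];  */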
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (int regno, rtx x, int nregs)
{
  int i;

  if (nregs == 0)
    return;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      rtx last = get_last_insn ();
      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
				    GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      gcc_assert (tem);

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

rtx
gen_group_rtx (rtx orig)
{
  int i, length;
  rtx *tmps;

  gcc_assert (GET_CODE (orig) == PARALLEL);

  length = XVECLEN (orig, 0);
  tmps = alloca (sizeof (rtx) * length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  if (i == 1)
    tmps[0] = 0;

  for (; i < length; i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
    }

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}
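
/* Example: a value spread over two registers might be described by a
   PARALLEL of this shape (a sketch; the register numbers and byte
   offsets are arbitrary):

     (parallel [(expr_list (reg:DI 3) (const_int 0))
		(expr_list (reg:DI 4) (const_int 8))])

   gen_group_rtx returns the same shape with fresh pseudos in place of
   registers 3 and 4.  */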
/* A subroutine of emit_group_load.  Arguments as for emit_group_load,
   except that values are placed in TMPS[i], and must later be moved
   into corresponding XEXP (XVECEXP (DST, 0, i), 0) element.  */

static void
emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
{
  rtx src;
  int start, i;
  enum machine_mode m = GET_MODE (orig_src);

  gcc_assert (GET_CODE (dst) == PARALLEL);

  if (m != VOIDmode
      && !SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_src)
      && GET_CODE (orig_src) != CONCAT)
    {
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
      if (imode == BLKmode)
	src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
      else
	src = gen_reg_rtx (imode);
      if (imode != BLKmode)
	src = gen_lowpart (GET_MODE (orig_src), src);
      emit_move_insn (src, orig_src);
      /* ...and back again.  */
      if (imode != BLKmode)
	src = gen_lowpart (imode, src);
      emit_group_load_1 (tmps, dst, src, type, ssize);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  /* Arrange to shift the fragment to where it belongs.
	     extract_bit_field loads to the lsb of the reg.  */
	  if (
#ifdef BLOCK_REG_PADDING
	      BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
	      == (BYTES_BIG_ENDIAN ? upward : downward)
#else
	      BYTES_BIG_ENDIAN
#endif
	      )
	    shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	  bytelen = ssize - bytepos;
	  gcc_assert (bytelen > 0);
	}

      /* If we won't be loading directly from memory, protect the real source
	 from strange tricks we might play; but make sure that the source can
	 be loaded directly into the destination.  */
      src = orig_src;
      if (!MEM_P (orig_src)
	  && (!CONSTANT_P (orig_src)
	      || (GET_MODE (orig_src) != mode
		  && GET_MODE (orig_src) != VOIDmode)))
	{
	  if (GET_MODE (orig_src) == VOIDmode)
	    src = gen_reg_rtx (mode);
	  else
	    src = gen_reg_rtx (GET_MODE (orig_src));

	  emit_move_insn (src, orig_src);
	}

      /* Optimize the access just a bit.  */
      if (MEM_P (src)
	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
	      || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	{
	  tmps[i] = gen_reg_rtx (mode);
	  emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
	}
      else if (COMPLEX_MODE_P (mode)
	       && GET_MODE (src) == mode
	       && bytelen == GET_MODE_SIZE (mode))
	/* Let emit_move_complex do the bulk of the work.  */
	tmps[i] = src;
      else if (GET_CODE (src) == CONCAT)
	{
	  unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
	  unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));

	  if ((bytepos == 0 && bytelen == slen0)
	      || (bytepos != 0 && bytepos + bytelen <= slen))
	    {
	      /* The following assumes that the concatenated objects all
		 have the same size.  In this case, a simple calculation
		 can be used to determine the object and the bit field
		 to be extracted.  */
	      tmps[i] = XEXP (src, bytepos / slen0);
	      if (! CONSTANT_P (tmps[i])
		  && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
		tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
					     (bytepos % slen0) * BITS_PER_UNIT,
					     1, NULL_RTX, mode, mode);
	    }
	  else
	    {
	      rtx mem;

	      gcc_assert (!bytepos);
	      mem = assign_stack_temp (GET_MODE (src), slen, 0);
	      emit_move_insn (mem, src);
	      tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
					   0, 1, NULL_RTX, mode, mode);
	    }
	}
      /* FIXME: A SIMD parallel will eventually lead to a subreg of a
	 SIMD register, which is currently broken.  While we get GCC
	 to emit proper RTL for these cases, let's dump to memory.  */
      else if (VECTOR_MODE_P (GET_MODE (dst))
	       && REG_P (src))
	{
	  int slen = GET_MODE_SIZE (GET_MODE (src));
	  rtx mem;

	  mem = assign_stack_temp (GET_MODE (src), slen, 0);
	  emit_move_insn (mem, src);
	  tmps[i] = adjust_address (mem, mode, (int) bytepos);
	}
      else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
	       && XVECLEN (dst, 0) > 1)
	tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
      else if (CONSTANT_P (src)
	       || (REG_P (src) && GET_MODE (src) == mode))
	tmps[i] = src;
      else
	tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
				     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
				     mode, mode);

      if (shift)
	tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
				build_int_cst (NULL_TREE, shift), tmps[i], 0);
    }
}
/* Emit code to move a block SRC of type TYPE to a block DST,
   where DST is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_SRC in bytes, or -1
   if not known.  */

void
emit_group_load (rtx dst, rtx src, tree type, int ssize)
{
  rtx *tmps;
  int i;

  tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
  emit_group_load_1 (tmps, dst, src, type, ssize);

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = 0; i < XVECLEN (dst, 0); i++)
    {
      rtx d = XEXP (XVECEXP (dst, 0, i), 0);
      if (d == NULL)
	continue;
      emit_move_insn (d, tmps[i]);
    }
}
/* Similar, but load SRC into new pseudos in a format that looks like
   PARALLEL.  This can later be fed to emit_group_move to get things
   in the right place.  */

rtx
emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
{
  rtvec vec;
  int i;

  vec = rtvec_alloc (XVECLEN (parallel, 0));
  emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);

  /* Convert the vector to look just like the original PARALLEL, except
     with the computed values.  */
  for (i = 0; i < XVECLEN (parallel, 0); i++)
    {
      rtx e = XVECEXP (parallel, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
	{
	  d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
	  e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
	}
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
}
/* Emit code to move a block SRC to block DST, where SRC and DST are
   non-consecutive groups of registers, each represented by a PARALLEL.  */

void
emit_group_move (rtx dst, rtx src)
{
  int i;

  gcc_assert (GET_CODE (src) == PARALLEL
	      && GET_CODE (dst) == PARALLEL
	      && XVECLEN (src, 0) == XVECLEN (dst, 0));

  /* Skip first entry if NULL.  */
  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
		    XEXP (XVECEXP (src, 0, i), 0));
}
/* Move a group of registers represented by a PARALLEL into pseudos.  */

rtx
emit_group_move_into_temps (rtx src)
{
  rtvec vec = rtvec_alloc (XVECLEN (src, 0));
  int i;

  for (i = 0; i < XVECLEN (src, 0); i++)
    {
      rtx e = XVECEXP (src, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
	e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (src), vec);
}
/* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
   where SRC is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_DST, or -1 if not
   known.  */

void
emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
{
  rtx *tmps, dst;
  int start, i;
  enum machine_mode m = GET_MODE (orig_dst);

  gcc_assert (GET_CODE (src) == PARALLEL);

  if (!SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
    {
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
      if (imode == BLKmode)
	dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
      else
	dst = gen_reg_rtx (imode);
      emit_group_store (dst, src, type, ssize);
      if (imode != BLKmode)
	dst = gen_lowpart (GET_MODE (orig_dst), dst);
      emit_move_insn (orig_dst, dst);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = alloca (sizeof (rtx) * XVECLEN (src, 0));

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      tmps[i] = gen_reg_rtx (GET_MODE (reg));
      emit_move_insn (tmps[i], reg);
    }

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
	 a return statement.  In that case, the dst and src are the same,
	 so no action is necessary.  */
      if (rtx_equal_p (dst, src))
	return;

      /* It is unclear if we can ever reach here, but we may as well handle
	 it.  Allocate a temporary, and split this into a store/load to/from
	 the temporary.  */

      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
      emit_group_store (temp, src, type, ssize);
      emit_group_load (dst, temp, type, ssize);
      return;
    }
  else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
    {
      dst = gen_reg_rtx (GET_MODE (orig_dst));
      /* Make life a bit easier for combine.  */
      emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
    }

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);
      rtx dest = dst;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  /* store_bit_field always takes its value from the lsb.
	     Move the fragment to the lsb if it's not already there.  */
	  if (
#ifdef BLOCK_REG_PADDING
	      BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
	      == (BYTES_BIG_ENDIAN ? upward : downward)
#else
	      BYTES_BIG_ENDIAN
#endif
	      )
	    {
	      int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	      tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
				      build_int_cst (NULL_TREE, shift),
				      tmps[i], 0);
	    }
	  bytelen = ssize - bytepos;
	}

      if (GET_CODE (dst) == CONCAT)
	{
	  if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
	    dest = XEXP (dst, 0);
	  else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
	    {
	      bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
	      dest = XEXP (dst, 1);
	    }
	  else
	    {
	      gcc_assert (bytepos == 0 && XVECLEN (src, 0));
	      dest = assign_stack_temp (GET_MODE (dest),
					GET_MODE_SIZE (GET_MODE (dest)), 0);
	      emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
			      tmps[i]);
	      dst = dest;
	      break;
	    }
	}

      /* Optimize the access just a bit.  */
      if (MEM_P (dest)
	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
	      || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
      else
	store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
			 mode, tmps[i]);
    }

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (orig_dst != dst)
    emit_move_insn (orig_dst, dst);
}
1986 /* Generate code to copy a BLKmode object of TYPE out of a
1987 set of registers starting with SRCREG into TGTBLK. If TGTBLK
1988 is null, a stack temporary is created. TGTBLK is returned.
1990 The purpose of this routine is to handle functions that return
1991 BLKmode structures in registers. Some machines (the PA for example)
1992 want to return all small structures in registers regardless of the
1993 structure's alignment. */
1996 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
1998 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
1999 rtx src = NULL, dst = NULL;
2000 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2001 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2005 tgtblk = assign_temp (build_qualified_type (type,
2007 | TYPE_QUAL_CONST)),
2009 preserve_temp_slots (tgtblk);
2012 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2013 into a new pseudo which is a full word. */
2015 if (GET_MODE (srcreg) != BLKmode
2016 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2017 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2019 /* If the structure doesn't take up a whole number of words, see whether
2020 SRCREG is padded on the left or on the right. If it's on the left,
2021 set PADDING_CORRECTION to the number of bits to skip.
2023 In most ABIs, the structure will be returned at the least end of
2024 the register, which translates to right padding on little-endian
2025 targets and left padding on big-endian targets. The opposite
2026 holds if the structure is returned at the most significant
2027 end of the register. */
2028 if (bytes % UNITS_PER_WORD != 0
2029 && (targetm.calls.return_in_msb (type)
2031 : BYTES_BIG_ENDIAN))
2033 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2035 /* Copy the structure BITSIZE bites at a time.
2037 We could probably emit more efficient code for machines which do not use
2038 strict alignment, but it doesn't seem worth the effort at the current
2040 for (bitpos = 0, xbitpos = padding_correction;
2041 bitpos < bytes * BITS_PER_UNIT;
2042 bitpos += bitsize, xbitpos += bitsize)
2044 /* We need a new source operand each time xbitpos is on a
2045 word boundary and when xbitpos == padding_correction
2046 (the first time through). */
2047 if (xbitpos % BITS_PER_WORD == 0
2048 || xbitpos == padding_correction)
2049 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2052 /* We need a new destination operand each time bitpos is on a word boundary. */
2054 if (bitpos % BITS_PER_WORD == 0)
2055 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2057 /* Use xbitpos for the source extraction (right justified) and
2058 bitpos for the destination store (left justified). */
2059 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2060 extract_bit_field (src, bitsize,
2061 xbitpos % BITS_PER_WORD, 1,
2062 NULL_RTX, word_mode, word_mode));
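/* A worked illustration of the padding correction above, assuming
   32-bit words on a big-endian target that does not return in the
   MSB: for a 6-byte structure, bytes % UNITS_PER_WORD == 2, so
   padding_correction = 32 - 2 * 8 = 16, and the copy loop starts
   extraction 16 bits into the first source word, skipping the pad.  */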
2068 /* Add a USE expression for REG to the (possibly empty) list pointed
2069 to by CALL_FUSAGE. REG must denote a hard register. */
2072 use_reg (rtx *call_fusage, rtx reg)
2074 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2077 = gen_rtx_EXPR_LIST (VOIDmode,
2078 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2081 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2082 starting at REGNO. All of these registers must be hard registers. */
2085 use_regs (rtx *call_fusage, int regno, int nregs)
2089 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2091 for (i = 0; i < nregs; i++)
2092 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2095 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2096 PARALLEL REGS. This is for calls that pass values in multiple
2097 non-contiguous locations. The Irix 6 ABI has examples of this. */
2100 use_group_regs (rtx *call_fusage, rtx regs)
2104 for (i = 0; i < XVECLEN (regs, 0); i++)
2106 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2108 /* A NULL entry means the parameter goes both on the stack and in
2109 registers. This can also be a MEM for targets that pass values
2110 partially on the stack and partially in registers. */
2111 if (reg != 0 && REG_P (reg))
2112 use_reg (call_fusage, reg);
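/* A minimal sketch of how a caller builds CALL_FUSAGE with the
   helpers above (the register numbers are invented for illustration;
   real callers use the target's argument registers).  Not compiled.  */
#if 0
static void
example_call_fusage (void)
{
  rtx fusage = NULL_RTX;

  /* Record that the call uses hard registers 0 and 1; the resulting
     EXPR_LIST would be attached to the CALL_INSN by the caller.  */
  use_regs (&fusage, 0, 2);
}
#endif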
2117 /* Determine whether the LEN bytes generated by CONSTFUN can be
2118 stored to memory using several move instructions. CONSTFUNDATA is
2119 a pointer which will be passed as argument in every CONSTFUN call.
2120 ALIGN is maximum alignment we can assume. Return nonzero if a
2121 call to store_by_pieces should succeed. */
2124 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2125 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2126 void *constfundata, unsigned int align)
2128 unsigned HOST_WIDE_INT l;
2129 unsigned int max_size;
2130 HOST_WIDE_INT offset = 0;
2131 enum machine_mode mode, tmode;
2132 enum insn_code icode;
2139 if (! STORE_BY_PIECES_P (len, align))
2142 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2143 if (align >= GET_MODE_ALIGNMENT (tmode))
2144 align = GET_MODE_ALIGNMENT (tmode);
2147 enum machine_mode xmode;
2149 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2151 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2152 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2153 || SLOW_UNALIGNED_ACCESS (tmode, align))
2156 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2159 /* We would first store what we can in the largest integer mode, then go to
2160 successively smaller modes. */
2163 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2168 max_size = STORE_MAX_PIECES + 1;
2169 while (max_size > 1)
2171 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2172 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2173 if (GET_MODE_SIZE (tmode) < max_size)
2176 if (mode == VOIDmode)
2179 icode = mov_optab->handlers[(int) mode].insn_code;
2180 if (icode != CODE_FOR_nothing
2181 && align >= GET_MODE_ALIGNMENT (mode))
2183 unsigned int size = GET_MODE_SIZE (mode);
2190 cst = (*constfun) (constfundata, offset, mode);
2191 if (!LEGITIMATE_CONSTANT_P (cst))
2201 max_size = GET_MODE_SIZE (mode);
2204 /* The code above should have handled everything. */
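/* A concrete trace of the scan above, assuming 32-bit words,
   STORE_MAX_PIECES == 4 and sufficient alignment: for LEN == 7 the
   loop selects SImode (one 4-byte store), then HImode (one 2-byte
   store), then QImode (the last byte), so CONSTFUN is queried at
   offsets 0, 4 and 6.  */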
2211 /* Generate several move instructions to store LEN bytes generated by
2212 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2213 pointer which will be passed as argument in every CONSTFUN call.
2214 ALIGN is maximum alignment we can assume.
2215 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
2216 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala stpcpy. */
2220 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2221 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2222 void *constfundata, unsigned int align, int endp)
2224 struct store_by_pieces data;
2228 gcc_assert (endp != 2);
2232 gcc_assert (STORE_BY_PIECES_P (len, align));
2233 data.constfun = constfun;
2234 data.constfundata = constfundata;
2237 store_by_pieces_1 (&data, align);
2242 gcc_assert (!data.reverse);
2247 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2248 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2250 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2253 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2260 to1 = adjust_address (data.to, QImode, data.offset);
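/* A sketch of the intended check-then-commit pairing of the two
   routines above, reusing the trivial callback from clear_by_pieces
   below (expository only; real callers such as the memset expanders
   supply their own CONSTFUN).  Not compiled.  */
#if 0
static void
example_store_zeros (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
{
  /* Commit to the by-pieces strategy only when every generated
     constant can be stored with a plain move insn.  */
  if (can_store_by_pieces (len, clear_by_pieces_1, NULL, align))
    store_by_pieces (to, len, clear_by_pieces_1, NULL, align, 0);
}
#endif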
2268 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2269 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2272 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2274 struct store_by_pieces data;
2279 data.constfun = clear_by_pieces_1;
2280 data.constfundata = NULL;
2283 store_by_pieces_1 (&data, align);
2286 /* Callback routine for clear_by_pieces.
2287 Return const0_rtx unconditionally. */
2290 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2291 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2292 enum machine_mode mode ATTRIBUTE_UNUSED)
2297 /* Subroutine of clear_by_pieces and store_by_pieces.
2298 Generate several move instructions to store LEN bytes of block TO. (A MEM
2299 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2302 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2303 unsigned int align ATTRIBUTE_UNUSED)
2305 rtx to_addr = XEXP (data->to, 0);
2306 unsigned int max_size = STORE_MAX_PIECES + 1;
2307 enum machine_mode mode = VOIDmode, tmode;
2308 enum insn_code icode;
2311 data->to_addr = to_addr;
2313 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2314 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2316 data->explicit_inc_to = 0;
2318 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2320 data->offset = data->len;
2322 /* If storing requires more than two move insns,
2323 copy addresses to registers (to make displacements shorter)
2324 and use post-increment if available. */
2325 if (!data->autinc_to
2326 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2328 /* Determine the main mode we'll be using. */
2329 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2330 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2331 if (GET_MODE_SIZE (tmode) < max_size)
2334 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2336 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2337 data->autinc_to = 1;
2338 data->explicit_inc_to = -1;
2341 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2342 && ! data->autinc_to)
2344 data->to_addr = copy_addr_to_reg (to_addr);
2345 data->autinc_to = 1;
2346 data->explicit_inc_to = 1;
2349 if (!data->autinc_to && CONSTANT_P (to_addr))
2350 data->to_addr = copy_addr_to_reg (to_addr);
2353 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2354 if (align >= GET_MODE_ALIGNMENT (tmode))
2355 align = GET_MODE_ALIGNMENT (tmode);
2358 enum machine_mode xmode;
2360 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2362 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2363 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2364 || SLOW_UNALIGNED_ACCESS (tmode, align))
2367 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2370 /* First store what we can in the largest integer mode, then go to
2371 successively smaller modes. */
2373 while (max_size > 1)
2375 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2376 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2377 if (GET_MODE_SIZE (tmode) < max_size)
2380 if (mode == VOIDmode)
2383 icode = mov_optab->handlers[(int) mode].insn_code;
2384 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2385 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2387 max_size = GET_MODE_SIZE (mode);
2390 /* The code above should have handled everything. */
2391 gcc_assert (!data->len);
2394 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2395 with move instructions for mode MODE. GENFUN is the gen_... function
2396 to make a move insn for that mode. DATA has all the other info. */
2399 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2400 struct store_by_pieces *data)
2402 unsigned int size = GET_MODE_SIZE (mode);
2405 while (data->len >= size)
2408 data->offset -= size;
2410 if (data->autinc_to)
2411 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2414 to1 = adjust_address (data->to, mode, data->offset);
2416 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2417 emit_insn (gen_add2_insn (data->to_addr,
2418 GEN_INT (-(HOST_WIDE_INT) size)));
2420 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2421 emit_insn ((*genfun) (to1, cst));
2423 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2424 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2426 if (! data->reverse)
2427 data->offset += size;
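/* Schematically, one iteration of the loop above with
   EXPLICIT_INC_TO > 0 and MODE == QImode emits something like

     (set (mem:QI (reg TO_ADDR)) (const_int C))
     (set (reg TO_ADDR) (plus (reg TO_ADDR) (const_int 1)))

   i.e. the store itself followed by the explicit increment.  */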
2433 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2434 its length in bytes. */
2437 clear_storage (rtx object, rtx size, enum block_op_methods method)
2439 enum machine_mode mode = GET_MODE (object);
2442 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2444 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2445 just move a zero. Otherwise, do this a piece at a time. */
2447 && GET_CODE (size) == CONST_INT
2448 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2450 rtx zero = CONST0_RTX (mode);
2453 emit_move_insn (object, zero);
2457 if (COMPLEX_MODE_P (mode))
2459 zero = CONST0_RTX (GET_MODE_INNER (mode));
2462 write_complex_part (object, zero, 0);
2463 write_complex_part (object, zero, 1);
2469 if (size == const0_rtx)
2472 align = MEM_ALIGN (object);
2474 if (GET_CODE (size) == CONST_INT
2475 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2476 clear_by_pieces (object, INTVAL (size), align);
2477 else if (set_storage_via_setmem (object, size, const0_rtx, align))
2480 return clear_storage_via_libcall (object, size,
2481 method == BLOCK_OP_TAILCALL);
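/* A minimal sketch of the interface above: clear a 16-byte stack
   temporary (expository only; not compiled).  Depending on the
   target this resolves to clear_by_pieces, a setmem pattern, or the
   memset libcall below.  */
#if 0
static void
example_clear_16 (void)
{
  rtx mem = assign_stack_temp (BLKmode, 16, 0);
  clear_storage (mem, GEN_INT (16), BLOCK_OP_NORMAL);
}
#endif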
2486 /* A subroutine of clear_storage. Expand a call to memset.
2487 Return the return value of memset, 0 otherwise. */
2490 clear_storage_via_libcall (rtx object, rtx size, bool tailcall)
2492 tree call_expr, arg_list, fn, object_tree, size_tree;
2493 enum machine_mode size_mode;
2496 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2497 place those pseudos into a VAR_DECL and use them later. */
2499 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2501 size_mode = TYPE_MODE (sizetype);
2502 size = convert_to_mode (size_mode, size, 1);
2503 size = copy_to_mode_reg (size_mode, size);
2505 /* It is incorrect to use the libcall calling conventions to call
2506 memset in this context. This could be a user call to memset and
2507 the user may wish to examine the return value from memset. For
2508 targets where libcalls and normal calls have different conventions
2509 for returning pointers, we could end up generating incorrect code. */
2511 object_tree = make_tree (ptr_type_node, object);
2512 size_tree = make_tree (sizetype, size);
2514 fn = clear_storage_libcall_fn (true);
2515 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2516 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
2517 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2519 /* Now we have to build up the CALL_EXPR itself. */
2520 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2521 call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2522 call_expr, arg_list, NULL_TREE);
2523 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2525 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2530 /* A subroutine of clear_storage_via_libcall. Create the tree node
2531 for the function we use for block clears. The first time FOR_CALL
2532 is true, we call assemble_external. */
2534 static GTY(()) tree block_clear_fn;
2537 init_block_clear_fn (const char *asmspec)
2539 if (!block_clear_fn)
2543 fn = get_identifier ("memset");
2544 args = build_function_type_list (ptr_type_node, ptr_type_node,
2545 integer_type_node, sizetype,
2548 fn = build_decl (FUNCTION_DECL, fn, args);
2549 DECL_EXTERNAL (fn) = 1;
2550 TREE_PUBLIC (fn) = 1;
2551 DECL_ARTIFICIAL (fn) = 1;
2552 TREE_NOTHROW (fn) = 1;
2554 block_clear_fn = fn;
2558 set_user_assembler_name (block_clear_fn, asmspec);
2562 clear_storage_libcall_fn (int for_call)
2564 static bool emitted_extern;
2566 if (!block_clear_fn)
2567 init_block_clear_fn (NULL);
2569 if (for_call && !emitted_extern)
2571 emitted_extern = true;
2572 make_decl_rtl (block_clear_fn);
2573 assemble_external (block_clear_fn);
2576 return block_clear_fn;
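/* For example, a target whose runtime provides its own block-clear
   entry point could call init_block_clear_fn ("__target_memset")
   (the name is invented for illustration), so that the calls emitted
   by clear_storage_via_libcall assemble to that symbol instead of
   "memset".  */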
2579 /* Expand a setmem pattern; return true if successful. */
2582 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align)
2584 /* Try the most limited insn first, because there's no point
2585 including more than one in the machine description unless
2586 the more limited one has some advantage. */
2588 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2589 enum machine_mode mode;
2591 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2592 mode = GET_MODE_WIDER_MODE (mode))
2594 enum insn_code code = setmem_optab[(int) mode];
2595 insn_operand_predicate_fn pred;
2597 if (code != CODE_FOR_nothing
2598 /* We don't need MODE to be narrower than
2599 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2600 the mode mask, as it is returned by the macro, it will
2601 definitely be less than the actual mode mask. */
2602 && ((GET_CODE (size) == CONST_INT
2603 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2604 <= (GET_MODE_MASK (mode) >> 1)))
2605 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2606 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2607 || (*pred) (object, BLKmode))
2608 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
2609 || (*pred) (opalign, VOIDmode)))
2612 enum machine_mode char_mode;
2613 rtx last = get_last_insn ();
2616 opsize = convert_to_mode (mode, size, 1);
2617 pred = insn_data[(int) code].operand[1].predicate;
2618 if (pred != 0 && ! (*pred) (opsize, mode))
2619 opsize = copy_to_mode_reg (mode, opsize);
2622 char_mode = insn_data[(int) code].operand[2].mode;
2623 if (char_mode != VOIDmode)
2625 opchar = convert_to_mode (char_mode, opchar, 1);
2626 pred = insn_data[(int) code].operand[2].predicate;
2627 if (pred != 0 && ! (*pred) (opchar, char_mode))
2628 opchar = copy_to_mode_reg (char_mode, opchar);
2631 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign);
2638 delete_insns_since (last);
2646 /* Write to one of the components of the complex value CPLX. Write VAL to
2647 the real part if IMAG_P is false, and the imaginary part if it's true. */
2650 write_complex_part (rtx cplx, rtx val, bool imag_p)
2652 enum machine_mode cmode;
2653 enum machine_mode imode;
2656 if (GET_CODE (cplx) == CONCAT)
2658 emit_move_insn (XEXP (cplx, imag_p), val);
2662 cmode = GET_MODE (cplx);
2663 imode = GET_MODE_INNER (cmode);
2664 ibitsize = GET_MODE_BITSIZE (imode);
2666 /* For MEMs simplify_gen_subreg may generate an invalid new address
2667 because, e.g., the original address is considered mode-dependent
2668 by the target, which restricts simplify_subreg from invoking
2669 adjust_address_nv. Instead of preparing fallback support for an
2670 invalid address, we call adjust_address_nv directly. */
2673 emit_move_insn (adjust_address_nv (cplx, imode,
2674 imag_p ? GET_MODE_SIZE (imode) : 0),
2679 /* If the sub-object is at least word sized, then we know that subregging
2680 will work. This special case is important, since store_bit_field
2681 wants to operate on integer modes, and there's rarely an OImode to
2682 correspond to TCmode. */
2683 if (ibitsize >= BITS_PER_WORD
2684 /* For hard regs we have exact predicates. Assume we can split
2685 the original object if it spans an even number of hard regs.
2686 This special case is important for SCmode on 64-bit platforms
2687 where the natural size of floating-point regs is 32-bit. */
2689 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2690 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2692 rtx part = simplify_gen_subreg (imode, cplx, cmode,
2693 imag_p ? GET_MODE_SIZE (imode) : 0);
2696 emit_move_insn (part, val);
2700 /* simplify_gen_subreg may fail for sub-word MEMs. */
2701 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2704 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val);
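/* A minimal sketch of the accessor above: materialize 1 + 0i in an
   SCmode pseudo (expository only, assuming SFmode/SCmode support;
   not compiled).  A pseudo's sub-word parts end up going through the
   store_bit_field fallback.  */
#if 0
static rtx
example_make_complex (void)
{
  rtx c = gen_reg_rtx (SCmode);
  write_complex_part (c, CONST1_RTX (SFmode), false);
  write_complex_part (c, CONST0_RTX (SFmode), true);
  return c;
}
#endif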
2707 /* Extract one of the components of the complex value CPLX. Extract the
2708 real part if IMAG_P is false, and the imaginary part if it's true. */
2711 read_complex_part (rtx cplx, bool imag_p)
2713 enum machine_mode cmode, imode;
2716 if (GET_CODE (cplx) == CONCAT)
2717 return XEXP (cplx, imag_p);
2719 cmode = GET_MODE (cplx);
2720 imode = GET_MODE_INNER (cmode);
2721 ibitsize = GET_MODE_BITSIZE (imode);
2723 /* Special case reads from complex constants that got spilled to memory. */
2724 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
2726 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2727 if (decl && TREE_CODE (decl) == COMPLEX_CST)
2729 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
2730 if (CONSTANT_CLASS_P (part))
2731 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
2735 /* For MEMs simplify_gen_subreg may generate an invalid new address
2736 because, e.g., the original address is considered mode-dependent
2737 by the target, which restricts simplify_subreg from invoking
2738 adjust_address_nv. Instead of preparing fallback support for an
2739 invalid address, we call adjust_address_nv directly. */
2741 return adjust_address_nv (cplx, imode,
2742 imag_p ? GET_MODE_SIZE (imode) : 0);
2744 /* If the sub-object is at least word sized, then we know that subregging
2745 will work. This special case is important, since extract_bit_field
2746 wants to operate on integer modes, and there's rarely an OImode to
2747 correspond to TCmode. */
2748 if (ibitsize >= BITS_PER_WORD
2749 /* For hard regs we have exact predicates. Assume we can split
2750 the original object if it spans an even number of hard regs.
2751 This special case is important for SCmode on 64-bit platforms
2752 where the natural size of floating-point regs is 32-bit. */
2754 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2755 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2757 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
2758 imag_p ? GET_MODE_SIZE (imode) : 0);
2762 /* simplify_gen_subreg may fail for sub-word MEMs. */
2763 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2766 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
2767 true, NULL_RTX, imode, imode);
2770 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
2771 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
2772 represented in NEW_MODE. If FORCE is true, this will never happen, as
2773 we'll force-create a SUBREG if needed. */
2776 emit_move_change_mode (enum machine_mode new_mode,
2777 enum machine_mode old_mode, rtx x, bool force)
2781 if (reload_in_progress && MEM_P (x))
2783 /* We can't use gen_lowpart here because it may call change_address
2784 which is not appropriate if we were called when a reload was in
2785 progress. We don't have to worry about changing the address since
2786 the size in bytes is supposed to be the same. Copy the MEM to
2787 change the mode and move any substitutions from the old MEM to the new one. */
2790 ret = adjust_address_nv (x, new_mode, 0);
2791 copy_replacements (x, ret);
2795 /* Note that we do want simplify_subreg's behavior of validating
2796 that the new mode is ok for a hard register. If we were to use
2797 simplify_gen_subreg, we would create the subreg, but would
2798 probably run into the target not being able to implement it. */
2799 /* Except, of course, when FORCE is true, when this is exactly what
2800 we want. Which is needed for CCmodes on some targets. */
2802 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
2804 ret = simplify_subreg (new_mode, x, old_mode, 0);
2810 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
2811 an integer mode of the same size as MODE. Returns the instruction
2812 emitted, or NULL if such a move could not be generated. */
2815 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y)
2817 enum machine_mode imode;
2818 enum insn_code code;
2820 /* There must exist a mode of the exact size we require. */
2821 imode = int_mode_for_mode (mode);
2822 if (imode == BLKmode)
2825 /* The target must support moves in this mode. */
2826 code = mov_optab->handlers[imode].insn_code;
2827 if (code == CODE_FOR_nothing)
2830 x = emit_move_change_mode (imode, mode, x, false);
2833 y = emit_move_change_mode (imode, mode, y, false);
2836 return emit_insn (GEN_FCN (code) (x, y));
2839 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
2840 Return an equivalent MEM that does not use an auto-increment. */
2843 emit_move_resolve_push (enum machine_mode mode, rtx x)
2845 enum rtx_code code = GET_CODE (XEXP (x, 0));
2846 HOST_WIDE_INT adjust;
2849 adjust = GET_MODE_SIZE (mode);
2850 #ifdef PUSH_ROUNDING
2851 adjust = PUSH_ROUNDING (adjust);
2853 if (code == PRE_DEC || code == POST_DEC)
2856 /* Do not use anti_adjust_stack, since we don't want to update
2857 stack_pointer_delta. */
2858 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
2859 GEN_INT (adjust), stack_pointer_rtx,
2860 0, OPTAB_LIB_WIDEN);
2861 if (temp != stack_pointer_rtx)
2862 emit_move_insn (stack_pointer_rtx, temp);
2868 temp = stack_pointer_rtx;
2871 temp = plus_constant (stack_pointer_rtx, -GET_MODE_SIZE (mode));
2874 temp = plus_constant (stack_pointer_rtx, GET_MODE_SIZE (mode));
2880 return replace_equiv_address (x, temp);
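/* Schematically, on a STACK_GROWS_DOWNWARD target with no push
   rounding, resolving (mem:SI (pre_dec (reg sp))) emits an explicit
   "sp = sp - 4" and returns (mem:SI (reg sp)), while a post_dec form
   returns (mem:SI (plus (reg sp) (const_int 4))), since its data
   lives above the already-adjusted pointer.  */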
2883 /* A subroutine of emit_move_complex. Generate a move from Y into X.
2884 X is known to satisfy push_operand, and MODE is known to be complex.
2885 Returns the last instruction emitted. */
2888 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
2890 enum machine_mode submode = GET_MODE_INNER (mode);
2893 #ifdef PUSH_ROUNDING
2894 unsigned int submodesize = GET_MODE_SIZE (submode);
2896 /* In case we output to the stack, but the size is smaller than the
2897 machine can push exactly, we need to use move instructions. */
2898 if (PUSH_ROUNDING (submodesize) != submodesize)
2900 x = emit_move_resolve_push (mode, x);
2901 return emit_move_insn (x, y);
2905 /* Note that the real part always precedes the imag part in memory
2906 regardless of machine's endianness. */
2907 switch (GET_CODE (XEXP (x, 0)))
2921 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2922 read_complex_part (y, imag_first));
2923 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2924 read_complex_part (y, !imag_first));
2927 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
2928 MODE is known to be complex. Returns the last instruction emitted. */
2931 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
2935 /* Need to take special care for pushes, to maintain proper ordering
2936 of the data, and possibly extra padding. */
2937 if (push_operand (x, mode))
2938 return emit_move_complex_push (mode, x, y);
2940 /* See if we can coerce the target into moving both values at once. */
2942 /* Move floating point as parts. */
2943 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
2944 && mov_optab->handlers[GET_MODE_INNER (mode)].insn_code != CODE_FOR_nothing)
2946 /* Not possible if the values are inherently not adjacent. */
2947 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
2949 /* Is possible if both are registers (or subregs of registers). */
2950 else if (register_operand (x, mode) && register_operand (y, mode))
2952 /* If one of the operands is a memory, and alignment constraints
2953 are friendly enough, we may be able to do combined memory operations.
2954 We do not attempt this if Y is a constant because that combination is
2955 usually better with the by-parts thing below. */
2956 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
2957 && (!STRICT_ALIGNMENT
2958 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
2967 /* For memory to memory moves, optimal behavior can be had with the
2968 existing block move logic. */
2969 if (MEM_P (x) && MEM_P (y))
2971 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
2972 BLOCK_OP_NO_LIBCALL);
2973 return get_last_insn ();
2976 ret = emit_move_via_integer (mode, x, y);
2981 /* Show the output dies here. This is necessary for SUBREGs
2982 of pseudos since we cannot track their lifetimes correctly;
2983 hard regs shouldn't appear here except as return values. */
2984 if (!reload_completed && !reload_in_progress
2985 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
2986 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2988 write_complex_part (x, read_complex_part (y, false), false);
2989 write_complex_part (x, read_complex_part (y, true), true);
2990 return get_last_insn ();
2993 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
2994 MODE is known to be MODE_CC. Returns the last instruction emitted. */
2997 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3001 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3004 enum insn_code code = mov_optab->handlers[CCmode].insn_code;
3005 if (code != CODE_FOR_nothing)
3007 x = emit_move_change_mode (CCmode, mode, x, true);
3008 y = emit_move_change_mode (CCmode, mode, y, true);
3009 return emit_insn (GEN_FCN (code) (x, y));
3013 /* Otherwise, find the MODE_INT mode of the same width. */
3014 ret = emit_move_via_integer (mode, x, y);
3015 gcc_assert (ret != NULL);
3019 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3020 MODE is any multi-word or full-word mode that lacks a move_insn
3021 pattern. Note that you will get better code if you define such
3022 patterns, even if they must turn into multiple assembler instructions. */
3025 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3032 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3034 /* If X is a push on the stack, do the push now and replace
3035 X with a reference to the stack pointer. */
3036 if (push_operand (x, mode))
3037 x = emit_move_resolve_push (mode, x);
3039 /* If we are in reload, see if either operand is a MEM whose address
3040 is scheduled for replacement. */
3041 if (reload_in_progress && MEM_P (x)
3042 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3043 x = replace_equiv_address_nv (x, inner);
3044 if (reload_in_progress && MEM_P (y)
3045 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3046 y = replace_equiv_address_nv (y, inner);
3050 need_clobber = false;
3052 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3055 rtx xpart = operand_subword (x, i, 1, mode);
3056 rtx ypart = operand_subword (y, i, 1, mode);
3058 /* If we can't get a part of Y, put Y into memory if it is a
3059 constant. Otherwise, force it into a register. Then we must
3060 be able to get a part of Y. */
3061 if (ypart == 0 && CONSTANT_P (y))
3063 y = force_const_mem (mode, y);
3064 ypart = operand_subword (y, i, 1, mode);
3066 else if (ypart == 0)
3067 ypart = operand_subword_force (y, i, mode);
3069 gcc_assert (xpart && ypart);
3071 need_clobber |= (GET_CODE (xpart) == SUBREG);
3073 last_insn = emit_move_insn (xpart, ypart);
3079 /* Show the output dies here. This is necessary for SUBREGs
3080 of pseudos since we cannot track their lifetimes correctly;
3081 hard regs shouldn't appear here except as return values.
3082 We never want to emit such a clobber after reload. */
3084 && ! (reload_in_progress || reload_completed)
3085 && need_clobber != 0)
3086 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3093 /* Low level part of emit_move_insn.
3094 Called just like emit_move_insn, but assumes X and Y
3095 are basically valid. */
3098 emit_move_insn_1 (rtx x, rtx y)
3100 enum machine_mode mode = GET_MODE (x);
3101 enum insn_code code;
3103 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3105 code = mov_optab->handlers[mode].insn_code;
3106 if (code != CODE_FOR_nothing)
3107 return emit_insn (GEN_FCN (code) (x, y));
3109 /* Expand complex moves by moving real part and imag part. */
3110 if (COMPLEX_MODE_P (mode))
3111 return emit_move_complex (mode, x, y);
3113 if (GET_MODE_CLASS (mode) == MODE_CC)
3114 return emit_move_ccmode (mode, x, y);
3116 /* Try using a move pattern for the corresponding integer mode. This is
3117 only safe when simplify_subreg can convert MODE constants into integer
3118 constants. At present, it can only do this reliably if the value
3119 fits within a HOST_WIDE_INT. */
3120 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3122 rtx ret = emit_move_via_integer (mode, x, y);
3127 return emit_move_multi_word (mode, x, y);
3130 /* Generate code to copy Y into X.
3131 Both Y and X must have the same mode, except that
3132 Y can be a constant with VOIDmode.
3133 This mode cannot be BLKmode; use emit_block_move for that.
3135 Return the last instruction emitted. */
3138 emit_move_insn (rtx x, rtx y)
3140 enum machine_mode mode = GET_MODE (x);
3141 rtx y_cst = NULL_RTX;
3144 gcc_assert (mode != BLKmode
3145 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3150 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3151 && (last_insn = compress_float_constant (x, y)))
3156 if (!LEGITIMATE_CONSTANT_P (y))
3158 y = force_const_mem (mode, y);
3160 /* If the target's cannot_force_const_mem prevented the spill,
3161 assume that the target's move expanders will also take care
3162 of the non-legitimate constant. */
3168 /* If X or Y are memory references, verify that their addresses are valid for the machine. */
3171 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3172 && ! push_operand (x, GET_MODE (x)))
3174 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
3175 x = validize_mem (x);
3178 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3180 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
3181 y = validize_mem (y);
3183 gcc_assert (mode != BLKmode);
3185 last_insn = emit_move_insn_1 (x, y);
3187 if (y_cst && REG_P (x)
3188 && (set = single_set (last_insn)) != NULL_RTX
3189 && SET_DEST (set) == x
3190 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3191 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
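/* A minimal sketch of the entry point above: load a constant into a
   fresh pseudo (expository only; not compiled).  The CONST_INT has
   VOIDmode, so the move's mode is taken from the destination.  */
#if 0
static void
example_emit_move (void)
{
  rtx dst = gen_reg_rtx (SImode);
  emit_move_insn (dst, GEN_INT (42));
}
#endif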
3196 /* If Y is representable exactly in a narrower mode, and the target can
3197 perform the extension directly from constant or memory, then emit the
3198 move as an extension. */
3201 compress_float_constant (rtx x, rtx y)
3203 enum machine_mode dstmode = GET_MODE (x);
3204 enum machine_mode orig_srcmode = GET_MODE (y);
3205 enum machine_mode srcmode;
3207 int oldcost, newcost;
3209 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3211 if (LEGITIMATE_CONSTANT_P (y))
3212 oldcost = rtx_cost (y, SET);
3214 oldcost = rtx_cost (force_const_mem (dstmode, y), SET);
3216 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3217 srcmode != orig_srcmode;
3218 srcmode = GET_MODE_WIDER_MODE (srcmode))
3221 rtx trunc_y, last_insn;
3223 /* Skip if the target can't extend this way. */
3224 ic = can_extend_p (dstmode, srcmode, 0);
3225 if (ic == CODE_FOR_nothing)
3228 /* Skip if the narrowed value isn't exact. */
3229 if (! exact_real_truncate (srcmode, &r))
3232 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3234 if (LEGITIMATE_CONSTANT_P (trunc_y))
3236 /* Skip if the target needs extra instructions to perform the extension. */
3238 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3240 /* This is valid, but may not be cheaper than the original. */
3241 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3242 if (oldcost < newcost)
3245 else if (float_extend_from_mem[dstmode][srcmode])
3247 trunc_y = force_const_mem (srcmode, trunc_y);
3248 /* This is valid, but may not be cheaper than the original. */
3249 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3250 if (oldcost < newcost)
3252 trunc_y = validize_mem (trunc_y);
3257 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3258 last_insn = get_last_insn ();
3261 set_unique_reg_note (last_insn, REG_EQUAL, y);
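/* For instance, a DFmode load of 1.5 can usually be emitted as a
   FLOAT_EXTEND of an SFmode constant, since 1.5 truncates to SFmode
   exactly; a load of 0.1 cannot, because 0.1 is inexact in every
   binary float mode and exact_real_truncate therefore rejects it.  */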
3269 /* Pushing data onto the stack. */
3271 /* Push a block of length SIZE (perhaps variable)
3272 and return an rtx to address the beginning of the block.
3273 The value may be virtual_outgoing_args_rtx.
3275 EXTRA is the number of bytes of padding to push in addition to SIZE.
3276 BELOW nonzero means this padding comes at low addresses;
3277 otherwise, the padding comes at high addresses. */
3280 push_block (rtx size, int extra, int below)
3284 size = convert_modes (Pmode, ptr_mode, size, 1);
3285 if (CONSTANT_P (size))
3286 anti_adjust_stack (plus_constant (size, extra));
3287 else if (REG_P (size) && extra == 0)
3288 anti_adjust_stack (size);
3291 temp = copy_to_mode_reg (Pmode, size);
3293 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3294 temp, 0, OPTAB_LIB_WIDEN);
3295 anti_adjust_stack (temp);
3298 #ifndef STACK_GROWS_DOWNWARD
3304 temp = virtual_outgoing_args_rtx;
3305 if (extra != 0 && below)
3306 temp = plus_constant (temp, extra);
3310 if (GET_CODE (size) == CONST_INT)
3311 temp = plus_constant (virtual_outgoing_args_rtx,
3312 -INTVAL (size) - (below ? 0 : extra));
3313 else if (extra != 0 && !below)
3314 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3315 negate_rtx (Pmode, plus_constant (size, extra)));
3317 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3318 negate_rtx (Pmode, size));
3321 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3324 #ifdef PUSH_ROUNDING
3326 /* Emit single push insn. */
3329 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3332 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3334 enum insn_code icode;
3335 insn_operand_predicate_fn pred;
3337 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3338 /* If there is a push pattern, use it. Otherwise fall back to the old way of
3339 passing a MEM that represents the push operation to the move expander. */
3340 icode = push_optab->handlers[(int) mode].insn_code;
3341 if (icode != CODE_FOR_nothing)
3343 if (((pred = insn_data[(int) icode].operand[0].predicate)
3344 && !((*pred) (x, mode))))
3345 x = force_reg (mode, x);
3346 emit_insn (GEN_FCN (icode) (x));
3349 if (GET_MODE_SIZE (mode) == rounded_size)
3350 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3351 /* If we are to pad downward, adjust the stack pointer first and
3352 then store X into the stack location using an offset. This is
3353 because emit_move_insn does not know how to pad; it does not have such an API. */
3355 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3357 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3358 HOST_WIDE_INT offset;
3360 emit_move_insn (stack_pointer_rtx,
3361 expand_binop (Pmode,
3362 #ifdef STACK_GROWS_DOWNWARD
3368 GEN_INT (rounded_size),
3369 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3371 offset = (HOST_WIDE_INT) padding_size;
3372 #ifdef STACK_GROWS_DOWNWARD
3373 if (STACK_PUSH_CODE == POST_DEC)
3374 /* We have already decremented the stack pointer, so get the previous value. */
3376 offset += (HOST_WIDE_INT) rounded_size;
3378 if (STACK_PUSH_CODE == POST_INC)
3379 /* We have already incremented the stack pointer, so get the previous value. */
3381 offset -= (HOST_WIDE_INT) rounded_size;
3383 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3387 #ifdef STACK_GROWS_DOWNWARD
3388 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3389 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3390 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3392 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3393 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3394 GEN_INT (rounded_size));
3396 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3399 dest = gen_rtx_MEM (mode, dest_addr);
3403 set_mem_attributes (dest, type, 1);
3405 if (flag_optimize_sibling_calls)
3406 /* Function incoming arguments may overlap with sibling call
3407 outgoing arguments and we cannot allow reordering of reads
3408 from function arguments with stores to outgoing arguments
3409 of sibling calls. */
3410 set_mem_alias_set (dest, 0);
3412 emit_move_insn (dest, x);
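/* A worked example of the downward-padding path above, assuming a
   STACK_GROWS_DOWNWARD target with PRE_DEC pushes where PUSH_ROUNDING
   rounds 2 up to 4: pushing an HImode value first drops the stack
   pointer by the rounded size of 4, then stores at OFFSET ==
   padding_size == 2, leaving the two pad bytes at the lower
   addresses.  */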
3416 /* Generate code to push X onto the stack, assuming it has mode MODE and
3418 MODE is redundant except when X is a CONST_INT (since they don't carry mode info).
3420 SIZE is an rtx for the size of data to be copied (in bytes),
3421 needed only if X is BLKmode.
3423 ALIGN (in bits) is maximum alignment we can assume.
3425 If PARTIAL and REG are both nonzero, then copy that many of the first
3426 bytes of X into registers starting with REG, and push the rest of X.
3427 The amount of space pushed is decreased by PARTIAL bytes.
3428 REG must be a hard register in this case.
3429 If REG is zero but PARTIAL is not, take all other actions for an
3430 argument partially in registers, but do not actually load any registers.
3433 EXTRA is the amount in bytes of extra space to leave next to this arg.
3434 This is ignored if an argument block has already been allocated.
3436 On a machine that lacks real push insns, ARGS_ADDR is the address of
3437 the bottom of the argument block for this call. We use indexing off there
3438 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3439 argument block has not been preallocated.
3441 ARGS_SO_FAR is the size of args previously pushed for this call.
3443 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3444 for arguments passed in registers. If nonzero, it will be the number
3445 of bytes required. */
3448 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3449 unsigned int align, int partial, rtx reg, int extra,
3450 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3454 enum direction stack_direction
3455 #ifdef STACK_GROWS_DOWNWARD
3461 /* Decide where to pad the argument: `downward' for below,
3462 `upward' for above, or `none' for don't pad it.
3463 Default is below for small data on big-endian machines; else above. */
3464 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3466 /* Invert direction if stack is post-decrement. FIXME: why? */
3468 if (STACK_PUSH_CODE == POST_DEC)
3469 if (where_pad != none)
3470 where_pad = (where_pad == downward ? upward : downward);
3474 if (mode == BLKmode)
3476 /* Copy a block into the stack, entirely or partially. */
3483 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3484 used = partial - offset;
3488 /* USED is now the # of bytes we need not copy to the stack
3489 because registers will take care of them. */
3492 xinner = adjust_address (xinner, BLKmode, used);
3494 /* If the partial register-part of the arg counts in its stack size,
3495 skip the part of stack space corresponding to the registers.
3496 Otherwise, start copying to the beginning of the stack space,
3497 by setting SKIP to 0. */
3498 skip = (reg_parm_stack_space == 0) ? 0 : used;
3500 #ifdef PUSH_ROUNDING
3501 /* Do it with several push insns if that doesn't take lots of insns
3502 and if there is no difficulty with push insns that skip bytes
3503 on the stack for alignment purposes. */
3506 && GET_CODE (size) == CONST_INT
3508 && MEM_ALIGN (xinner) >= align
3509 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3510 /* Here we avoid the case of a structure whose weak alignment
3511 forces many pushes of a small amount of data,
3512 and such small pushes do rounding that causes trouble. */
3513 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3514 || align >= BIGGEST_ALIGNMENT
3515 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3516 == (align / BITS_PER_UNIT)))
3517 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3519 /* Push padding now if padding above and stack grows down,
3520 or if padding below and stack grows up.
3521 But if space already allocated, this has already been done. */
3522 if (extra && args_addr == 0
3523 && where_pad != none && where_pad != stack_direction)
3524 anti_adjust_stack (GEN_INT (extra));
3526 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3529 #endif /* PUSH_ROUNDING */
3533 /* Otherwise make space on the stack and copy the data
3534 to the address of that space. */
3536 /* Deduct words put into registers from the size we must copy. */
3539 if (GET_CODE (size) == CONST_INT)
3540 size = GEN_INT (INTVAL (size) - used);
3542 size = expand_binop (GET_MODE (size), sub_optab, size,
3543 GEN_INT (used), NULL_RTX, 0,
3547 /* Get the address of the stack space.
3548 In this case, we do not deal with EXTRA separately.
3549 A single stack adjust will do. */
3552 temp = push_block (size, extra, where_pad == downward);
3555 else if (GET_CODE (args_so_far) == CONST_INT)
3556 temp = memory_address (BLKmode,
3557 plus_constant (args_addr,
3558 skip + INTVAL (args_so_far)));
3560 temp = memory_address (BLKmode,
3561 plus_constant (gen_rtx_PLUS (Pmode,
3566 if (!ACCUMULATE_OUTGOING_ARGS)
3568 /* If the source is referenced relative to the stack pointer,
3569 copy it to another register to stabilize it. We do not need
3570 to do this if we know that we won't be changing sp. */
3572 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3573 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3574 temp = copy_to_reg (temp);
3577 target = gen_rtx_MEM (BLKmode, temp);
3579 /* We do *not* set_mem_attributes here, because incoming arguments
3580 may overlap with sibling call outgoing arguments and we cannot
3581 allow reordering of reads from function arguments with stores
3582 to outgoing arguments of sibling calls. We do, however, want
3583 to record the alignment of the stack slot. */
3584 /* ALIGN may well be better aligned than TYPE, e.g. due to
3585 PARM_BOUNDARY. Assume the caller isn't lying. */
3586 set_mem_align (target, align);
3588 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3591 else if (partial > 0)
3593 /* Scalar partly in registers. */
3595 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3598 /* # bytes of start of argument
3599 that we must make space for but need not store. */
3600 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3601 int args_offset = INTVAL (args_so_far);
3604 /* Push padding now if padding above and stack grows down,
3605 or if padding below and stack grows up.
3606 But if space already allocated, this has already been done. */
3607 if (extra && args_addr == 0
3608 && where_pad != none && where_pad != stack_direction)
3609 anti_adjust_stack (GEN_INT (extra));
3611 /* If we make space by pushing it, we might as well push
3612 the real data. Otherwise, we can leave OFFSET nonzero
3613 and leave the space uninitialized. */
3617 /* Now NOT_STACK gets the number of words that we don't need to
3618 allocate on the stack. Convert OFFSET to words too. */
3619 not_stack = (partial - offset) / UNITS_PER_WORD;
3620 offset /= UNITS_PER_WORD;
3622 /* If the partial register-part of the arg counts in its stack size,
3623 skip the part of stack space corresponding to the registers.
3624 Otherwise, start copying to the beginning of the stack space,
3625 by setting SKIP to 0. */
3626 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3628 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3629 x = validize_mem (force_const_mem (mode, x));
3631 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3632 SUBREGs of such registers are not allowed. */
3633 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3634 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3635 x = copy_to_reg (x);
3637 /* Loop over all the words allocated on the stack for this arg. */
3638 /* We can do it by words, because any scalar bigger than a word
3639 has a size a multiple of a word. */
3640 #ifndef PUSH_ARGS_REVERSED
3641 for (i = not_stack; i < size; i++)
3643 for (i = size - 1; i >= not_stack; i--)
3645 if (i >= not_stack + offset)
3646 emit_push_insn (operand_subword_force (x, i, mode),
3647 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3649 GEN_INT (args_offset + ((i - not_stack + skip)
3651 reg_parm_stack_space, alignment_pad);
3658 /* Push padding now if padding above and stack grows down,
3659 or if padding below and stack grows up.
3660 But if space already allocated, this has already been done. */
3661 if (extra && args_addr == 0
3662 && where_pad != none && where_pad != stack_direction)
3663 anti_adjust_stack (GEN_INT (extra));
3665 #ifdef PUSH_ROUNDING
3666 if (args_addr == 0 && PUSH_ARGS)
3667 emit_single_push_insn (mode, x, type);
3671 if (GET_CODE (args_so_far) == CONST_INT)
3673 = memory_address (mode,
3674 plus_constant (args_addr,
3675 INTVAL (args_so_far)));
3677 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3679 dest = gen_rtx_MEM (mode, addr);
3681 /* We do *not* set_mem_attributes here, because incoming arguments
3682 may overlap with sibling call outgoing arguments and we cannot
3683 allow reordering of reads from function arguments with stores
3684 to outgoing arguments of sibling calls. We do, however, want
3685 to record the alignment of the stack slot. */
3686 /* ALIGN may well be better aligned than TYPE, e.g. due to
3687 PARM_BOUNDARY. Assume the caller isn't lying. */
3688 set_mem_align (dest, align);
3690 emit_move_insn (dest, x);
3694 /* If part should go in registers, copy that part
3695 into the appropriate registers. Do this now, at the end,
3696 since mem-to-mem copies above may do function calls. */
3697 if (partial > 0 && reg != 0)
3699 /* Handle calls that pass values in multiple non-contiguous locations.
3700 The Irix 6 ABI has examples of this. */
3701 if (GET_CODE (reg) == PARALLEL)
3702 emit_group_load (reg, x, type, -1);
3705 gcc_assert (partial % UNITS_PER_WORD == 0);
3706 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
3710 if (extra && args_addr == 0 && where_pad == stack_direction)
3711 anti_adjust_stack (GEN_INT (extra));
3713 if (alignment_pad && args_addr == 0)
3714 anti_adjust_stack (alignment_pad);
3717 /* Return X if X can be used as a subtarget in a sequence of arithmetic operations. */
3721 get_subtarget (rtx x)
3725 /* Only registers can be subtargets. */
3727 /* Don't use hard regs to avoid extending their life. */
3728 || REGNO (x) < FIRST_PSEUDO_REGISTER
3732 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
3733 FIELD is a bitfield. Returns true if the optimization was successful,
3734 and there's nothing else to do. */
3737 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
3738 unsigned HOST_WIDE_INT bitpos,
3739 enum machine_mode mode1, rtx str_rtx,
3742 enum machine_mode str_mode = GET_MODE (str_rtx);
3743 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
3748 if (mode1 != VOIDmode
3749 || bitsize >= BITS_PER_WORD
3750 || str_bitsize > BITS_PER_WORD
3751 || TREE_SIDE_EFFECTS (to)
3752 || TREE_THIS_VOLATILE (to))
3756 if (!BINARY_CLASS_P (src)
3757 || TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
3760 op0 = TREE_OPERAND (src, 0);
3761 op1 = TREE_OPERAND (src, 1);
3764 if (!operand_equal_p (to, op0, 0))
3767 if (MEM_P (str_rtx))
3769 unsigned HOST_WIDE_INT offset1;
3771 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
3772 str_mode = word_mode;
3773 str_mode = get_best_mode (bitsize, bitpos,
3774 MEM_ALIGN (str_rtx), str_mode, 0);
3775 if (str_mode == VOIDmode)
3777 str_bitsize = GET_MODE_BITSIZE (str_mode);
3780 bitpos %= str_bitsize;
3781 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
3782 str_rtx = adjust_address (str_rtx, str_mode, offset1);
3784 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
3787 /* If the bit field covers the whole REG/MEM, store_field
3788 will likely generate better code. */
3789 if (bitsize >= str_bitsize)
3792 /* We can't handle fields split across multiple entities. */
3793 if (bitpos + bitsize > str_bitsize)
3796 if (BYTES_BIG_ENDIAN)
3797 bitpos = str_bitsize - bitpos - bitsize;
3799 switch (TREE_CODE (src))
3803 /* For now, just optimize the case of the topmost bitfield
3804 where we don't need to do any masking and also
3805 1-bit bitfields where xor can be used.
3806 We might win by one instruction for the other bitfields
3807 too if insv/extv instructions aren't used, so that
3808 can be added later. */
3809 if (bitpos + bitsize != str_bitsize
3810 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
3813 value = expand_expr (op1, NULL_RTX, str_mode, 0);
3814 value = convert_modes (str_mode,
3815 TYPE_MODE (TREE_TYPE (op1)), value,
3816 TYPE_UNSIGNED (TREE_TYPE (op1)));
3818 /* We may be accessing data outside the field, which means
3819 we can alias adjacent data. */
3820 if (MEM_P (str_rtx))
3822 str_rtx = shallow_copy_rtx (str_rtx);
3823 set_mem_alias_set (str_rtx, 0);
3824 set_mem_expr (str_rtx, 0);
3827 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
3828 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
3830 value = expand_and (str_mode, value, const1_rtx, NULL);
3833 value = expand_shift (LSHIFT_EXPR, str_mode, value,
3834 build_int_cst (NULL_TREE, bitpos),
3836 result = expand_binop (str_mode, binop, str_rtx,
3837 value, str_rtx, 1, OPTAB_WIDEN);
3838 if (result != str_rtx)
3839 emit_move_insn (str_rtx, result);
3844 if (TREE_CODE (op1) != INTEGER_CST)
3846 value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), 0);
3847 value = convert_modes (GET_MODE (str_rtx),
3848 TYPE_MODE (TREE_TYPE (op1)), value,
3849 TYPE_UNSIGNED (TREE_TYPE (op1)));
3851 /* We may be accessing data outside the field, which means
3852 we can alias adjacent data. */
3853 if (MEM_P (str_rtx))
3855 str_rtx = shallow_copy_rtx (str_rtx);
3856 set_mem_alias_set (str_rtx, 0);
3857 set_mem_expr (str_rtx, 0);
3860 binop = TREE_CODE (src) == BIT_IOR_EXPR ? ior_optab : xor_optab;
3861 if (bitpos + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
3863 rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize)
3865 value = expand_and (GET_MODE (str_rtx), value, mask,
3868 value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
3869 build_int_cst (NULL_TREE, bitpos),
3871 result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
3872 value, str_rtx, 1, OPTAB_WIDEN);
3873 if (result != str_rtx)
3874 emit_move_insn (str_rtx, result);
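/* As a concrete case of the optimization above: for "x.f ^= 1",
   where F is a one-bit field, the BIT_XOR_EXPR arm masks the
   expanded right-hand side down to a single bit, shifts it to
   BITPOS, and xors it into the containing word, replacing the usual
   extract/modify/insert sequence with one logical insn.  */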
3885 /* Expand an assignment that stores the value of FROM into TO. */
3888 expand_assignment (tree to, tree from)
3893 /* Don't crash if the lhs of the assignment was erroneous. */
3895 if (TREE_CODE (to) == ERROR_MARK)
3897 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3901 /* Assignment of a structure component needs special treatment
3902 if the structure component's rtx is not simply a MEM.
3903 Assignment of an array element at a constant index, and assignment of
3904 an array element in an unaligned packed structure field, have the same problem. */
3906 if (handled_component_p (to)
3907 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
3909 enum machine_mode mode1;
3910 HOST_WIDE_INT bitsize, bitpos;
3917 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3918 &unsignedp, &volatilep, true);
3920 /* If we are going to use store_bit_field and extract_bit_field,
3921 make sure to_rtx will be safe for multiple use. */
3923 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3927 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
3929 gcc_assert (MEM_P (to_rtx));
3931 #ifdef POINTERS_EXTEND_UNSIGNED
3932 if (GET_MODE (offset_rtx) != Pmode)
3933 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
3935 if (GET_MODE (offset_rtx) != ptr_mode)
3936 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3939 /* A constant address in TO_RTX can have VOIDmode; we must not
3940 call force_reg in that case, so avoid it. */
3942 && GET_MODE (to_rtx) == BLKmode
3943 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3945 && (bitpos % bitsize) == 0
3946 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3947 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3949 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3953 to_rtx = offset_address (to_rtx, offset_rtx,
3954 highest_pow2_factor_for_target (to,
3958 /* Handle expand_expr of a complex value returning a CONCAT. */
3959 if (GET_CODE (to_rtx) == CONCAT)
3961 if (TREE_CODE (TREE_TYPE (from)) == COMPLEX_TYPE)
3963 gcc_assert (bitpos == 0);
3964 result = store_expr (from, to_rtx, false);
3968 gcc_assert (bitpos == 0 || bitpos == GET_MODE_BITSIZE (mode1));
3969 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false);
3976 /* If the field is at offset zero, we could have been given the
3977 DECL_RTX of the parent struct. Don't munge it. */
3978 to_rtx = shallow_copy_rtx (to_rtx);
3980 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
3982 /* Deal with volatile and readonly fields. The former is only
3983 done for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3985 MEM_VOLATILE_P (to_rtx) = 1;
3986 if (component_uses_parent_alias_set (to))
3987 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3990 if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1,
3994 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3995 TREE_TYPE (tem), get_alias_set (to));
3999 preserve_temp_slots (result);
4005 /* If the rhs is a function call and its value is not an aggregate,
4006 call the function before we start to compute the lhs.
4007 This is needed for correct code for cases such as
4008 val = setjmp (buf) on machines where reference to val
4009 requires loading up part of an address in a separate insn.
4011 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4012 since it might be a promoted variable where the zero- or sign-extension
4013 needs to be done. Handling this in the normal way is safe because no
4014 computation is done before the call. */
4015 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
4016 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4017 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4018 && REG_P (DECL_RTL (to))))
4023 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
4025 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4027 /* Handle calls that return values in multiple non-contiguous locations.
4028 The Irix 6 ABI has examples of this. */
4029 if (GET_CODE (to_rtx) == PARALLEL)
4030 emit_group_load (to_rtx, value, TREE_TYPE (from),
4031 int_size_in_bytes (TREE_TYPE (from)));
4032 else if (GET_MODE (to_rtx) == BLKmode)
4033 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4036 if (POINTER_TYPE_P (TREE_TYPE (to)))
4037 value = convert_memory_address (GET_MODE (to_rtx), value);
4038 emit_move_insn (to_rtx, value);
4040 preserve_temp_slots (to_rtx);
4046 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4047 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4050 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4052 /* Don't move directly into a return register. */
4053 if (TREE_CODE (to) == RESULT_DECL
4054 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
4059 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
4061 if (GET_CODE (to_rtx) == PARALLEL)
4062 emit_group_load (to_rtx, temp, TREE_TYPE (from),
4063 int_size_in_bytes (TREE_TYPE (from)));
4065 emit_move_insn (to_rtx, temp);
4067 preserve_temp_slots (to_rtx);
4073 /* In case we are returning the contents of an object which overlaps
4074 the place the value is being stored, use a safe function when copying
4075 a value through a pointer into a structure value return block. */
4076 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4077 && current_function_returns_struct
4078 && !current_function_returns_pcc_struct)
4083 size = expr_size (from);
4084 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
4086 emit_library_call (memmove_libfunc, LCT_NORMAL,
4087 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4088 XEXP (from_rtx, 0), Pmode,
4089 convert_to_mode (TYPE_MODE (sizetype),
4090 size, TYPE_UNSIGNED (sizetype)),
4091 TYPE_MODE (sizetype));
4093 preserve_temp_slots (to_rtx);
4099 /* Compute FROM and store the value in the rtx we got. */
4102 result = store_expr (from, to_rtx, 0);
4103 preserve_temp_slots (result);
4109 /* Generate code for computing expression EXP,
4110 and storing the value into TARGET.
4112 If the mode is BLKmode then we may return TARGET itself.
4113 It turns out that in BLKmode it doesn't cause a problem,
4114 because C has no operators that could combine two different
4115 assignments into the same BLKmode object with different values
4116 with no sequence point.  Will other languages need this to be more thorough?
4119 If CALL_PARAM_P is nonzero, this is a store into a call param on the
4120 stack, and block moves may need to be treated specially. */
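/* Illustrative usage sketch (not part of the original code; the names
   RHS and TO_RTX are hypothetical): a typical caller does

	rtx result = store_expr (rhs, to_rtx, 0);

   and passes 1 for CALL_PARAM_P when TO_RTX is an outgoing argument
   slot, so that any block move is emitted with BLOCK_OP_CALL_PARM.  */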
4123 store_expr (tree exp, rtx target, int call_param_p)
4126 rtx alt_rtl = NULL_RTX;
4127 int dont_return_target = 0;
4129 if (VOID_TYPE_P (TREE_TYPE (exp)))
4131 /* C++ can generate ?: expressions with a throw expression in one
4132 branch and an rvalue in the other. Here, we resolve attempts to
4133 store the throw expression's nonexistent result. */
4134 gcc_assert (!call_param_p);
4135 expand_expr (exp, const0_rtx, VOIDmode, 0);
4138 if (TREE_CODE (exp) == COMPOUND_EXPR)
4140 /* Perform first part of compound expression, then assign from second part.  */
4142 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4143 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4144 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
4146 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4148 /* For a conditional expression, get a safe form of the target.  Then
4149 test the condition, doing the appropriate assignment on either
4150 side. This avoids the creation of unnecessary temporaries.
4151 For non-BLKmode, it is more efficient not to do this. */
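/* Sketch of the control flow emitted below (illustrative only): for
   "dst = cond ? a : b" with a BLKmode DST we generate roughly

	   if (!cond) goto lab1;
	   dst = a;
	   goto lab2;
	lab1:
	   dst = b;
	lab2:
	   ...

   rather than first materializing the chosen value in a temporary.  */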
4153 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4155 do_pending_stack_adjust ();
4157 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4158 store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
4159 emit_jump_insn (gen_jump (lab2));
4162 store_expr (TREE_OPERAND (exp, 2), target, call_param_p);
4168 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4169 /* If this is a scalar in a register that is stored in a wider mode
4170 than the declared mode, compute the result into its declared mode
4171 and then convert to the wider mode.  Our value is the computed expression.  */
4174 rtx inner_target = 0;
4176 /* We can do the conversion inside EXP, which will often result
4177 in some optimizations. Do the conversion in two steps: first
4178 change the signedness, if needed, then the extend. But don't
4179 do this if the type of EXP is a subtype of something else
4180 since then the conversion might involve more than just
4181 converting modes. */
4182 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
4183 && TREE_TYPE (TREE_TYPE (exp)) == 0
4184 && (!lang_hooks.reduce_bit_field_operations
4185 || (GET_MODE_PRECISION (GET_MODE (target))
4186 == TYPE_PRECISION (TREE_TYPE (exp)))))
4188 if (TYPE_UNSIGNED (TREE_TYPE (exp))
4189 != SUBREG_PROMOTED_UNSIGNED_P (target))
4191 (lang_hooks.types.signed_or_unsigned_type
4192 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4194 exp = convert (lang_hooks.types.type_for_mode
4195 (GET_MODE (SUBREG_REG (target)),
4196 SUBREG_PROMOTED_UNSIGNED_P (target)),
4199 inner_target = SUBREG_REG (target);
4202 temp = expand_expr (exp, inner_target, VOIDmode,
4203 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4205 /* If TEMP is a VOIDmode constant, use convert_modes to make
4206 sure that we properly convert it. */
4207 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4209 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4210 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4211 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4212 GET_MODE (target), temp,
4213 SUBREG_PROMOTED_UNSIGNED_P (target));
4216 convert_move (SUBREG_REG (target), temp,
4217 SUBREG_PROMOTED_UNSIGNED_P (target));
4223 temp = expand_expr_real (exp, target, GET_MODE (target),
4225 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4227 /* Return TARGET if it's a specified hardware register.
4228 If TARGET is a volatile mem ref, either return TARGET
4229 or return a reg copied *from* TARGET; ANSI requires this.
4231 Otherwise, if TEMP is not TARGET, return TEMP
4232 if it is constant (for efficiency),
4233 or if we really want the correct value. */
4234 if (!(target && REG_P (target)
4235 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4236 && !(MEM_P (target) && MEM_VOLATILE_P (target))
4237 && ! rtx_equal_p (temp, target)
4238 && CONSTANT_P (temp))
4239 dont_return_target = 1;
4242 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4243 the same as that of TARGET, adjust the constant. This is needed, for
4244 example, in case it is a CONST_DOUBLE and we want only a word-sized value.  */
4246 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4247 && TREE_CODE (exp) != ERROR_MARK
4248 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4249 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4250 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
4252 /* If value was not generated in the target, store it there.
4253 Convert the value to TARGET's type first if necessary and emit the
4254 pending incrementations that have been queued when expanding EXP.
4255 Note that we cannot emit the whole queue blindly because this will
4256 effectively disable the POST_INC optimization later.
4258 If TEMP and TARGET compare equal according to rtx_equal_p, but
4259 one or both of them are volatile memory refs, we have to distinguish
4261 - expand_expr has used TARGET. In this case, we must not generate
4262 another copy.  This can be detected by TARGET being equal according to ==.
4264 - expand_expr has not used TARGET - that means that the source just
4265 happens to have the same RTX form. Since temp will have been created
4266 by expand_expr, it will compare unequal according to == .
4267 We must generate a copy in this case, to reach the correct number
4268 of volatile memory references. */
4270 if ((! rtx_equal_p (temp, target)
4271 || (temp != target && (side_effects_p (temp)
4272 || side_effects_p (target))))
4273 && TREE_CODE (exp) != ERROR_MARK
4274 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4275 but TARGET is not a valid memory reference, TEMP will differ
4276 from TARGET although it is really the same location. */
4277 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4278 /* If there's nothing to copy, don't bother. Don't call
4279 expr_size unless necessary, because some front-ends' (e.g. C++)
4280 expr_size hooks must not be given objects that are not
4281 supposed to be bit-copied or bit-initialized. */
4282 && expr_size (exp) != const0_rtx)
4284 if (GET_MODE (temp) != GET_MODE (target)
4285 && GET_MODE (temp) != VOIDmode)
4287 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4288 if (dont_return_target)
4290 /* In this case, we will return TEMP,
4291 so make sure it has the proper mode.
4292 But don't forget to store the value into TARGET. */
4293 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4294 emit_move_insn (target, temp);
4297 convert_move (target, temp, unsignedp);
4300 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4302 /* Handle copying a string constant into an array. The string
4303 constant may be shorter than the array. So copy just the string's
4304 actual length, and clear the rest. First get the size of the data
4305 type of the string, which is actually the size of the target. */
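/* Worked example (illustrative): for "char buf[8] = \"hi\";" the string
   constant's TREE_STRING_LENGTH is 3 (it includes the trailing NUL),
   while the target size is 8; we copy 3 bytes and clear the other 5.  */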
4306 rtx size = expr_size (exp);
4308 if (GET_CODE (size) == CONST_INT
4309 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4310 emit_block_move (target, temp, size,
4312 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4315 /* Compute the size of the data to copy from the string. */
4317 = size_binop (MIN_EXPR,
4318 make_tree (sizetype, size),
4319 size_int (TREE_STRING_LENGTH (exp)));
4321 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4323 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4326 /* Copy that much. */
4327 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4328 TYPE_UNSIGNED (sizetype));
4329 emit_block_move (target, temp, copy_size_rtx,
4331 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4333 /* Figure out how much is left in TARGET that we have to clear.
4334 Do all calculations in ptr_mode. */
4335 if (GET_CODE (copy_size_rtx) == CONST_INT)
4337 size = plus_constant (size, -INTVAL (copy_size_rtx));
4338 target = adjust_address (target, BLKmode,
4339 INTVAL (copy_size_rtx));
4343 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4344 copy_size_rtx, NULL_RTX, 0,
4347 #ifdef POINTERS_EXTEND_UNSIGNED
4348 if (GET_MODE (copy_size_rtx) != Pmode)
4349 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4350 TYPE_UNSIGNED (sizetype));
4353 target = offset_address (target, copy_size_rtx,
4354 highest_pow2_factor (copy_size));
4355 label = gen_label_rtx ();
4356 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4357 GET_MODE (size), 0, label);
4360 if (size != const0_rtx)
4361 clear_storage (target, size, BLOCK_OP_NORMAL);
4367 /* Handle calls that return values in multiple non-contiguous locations.
4368 The Irix 6 ABI has examples of this. */
4369 else if (GET_CODE (target) == PARALLEL)
4370 emit_group_load (target, temp, TREE_TYPE (exp),
4371 int_size_in_bytes (TREE_TYPE (exp)));
4372 else if (GET_MODE (temp) == BLKmode)
4373 emit_block_move (target, temp, expr_size (exp),
4375 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4378 temp = force_operand (temp, target);
4380 emit_move_insn (target, temp);
4387 /* Examine CTOR to discover:
4388 * how many scalar fields are set to nonzero values,
4389 and place it in *P_NZ_ELTS;
4390 * how many scalar fields are set to non-constant values,
4391 and place it in *P_NC_ELTS; and
4392 * how many scalar fields in total are in CTOR,
4393 and place it in *P_ELT_COUNT.
4394 * if a type is a union, and the initializer from the constructor
4395 is not the largest element in the union, then set *p_must_clear. */
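/* Example (illustrative; the function f is hypothetical): for

	struct { int a, b, c; } x = { 1, 0, f () };

   we report *P_ELT_COUNT == 3, *P_NZ_ELTS == 2 (the 1 and the call,
   which is not known to be zero) and *P_NC_ELTS == 1 (the call, which
   is not constant).  */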
4398 categorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts,
4399 HOST_WIDE_INT *p_nc_elts,
4400 HOST_WIDE_INT *p_elt_count,
4403 unsigned HOST_WIDE_INT idx;
4404 HOST_WIDE_INT nz_elts, nc_elts, elt_count;
4405 tree value, purpose;
4411 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
4416 if (TREE_CODE (purpose) == RANGE_EXPR)
4418 tree lo_index = TREE_OPERAND (purpose, 0);
4419 tree hi_index = TREE_OPERAND (purpose, 1);
4421 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4422 mult = (tree_low_cst (hi_index, 1)
4423 - tree_low_cst (lo_index, 1) + 1);
4426 switch (TREE_CODE (value))
4430 HOST_WIDE_INT nz = 0, nc = 0, ic = 0;
4431 categorize_ctor_elements_1 (value, &nz, &nc, &ic, p_must_clear);
4432 nz_elts += mult * nz;
4433 nc_elts += mult * nc;
4434 elt_count += mult * ic;
4440 if (!initializer_zerop (value))
4446 nz_elts += mult * TREE_STRING_LENGTH (value);
4447 elt_count += mult * TREE_STRING_LENGTH (value);
4451 if (!initializer_zerop (TREE_REALPART (value)))
4453 if (!initializer_zerop (TREE_IMAGPART (value)))
4461 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4463 if (!initializer_zerop (TREE_VALUE (v)))
4473 if (!initializer_constant_valid_p (value, TREE_TYPE (value)))
4480 && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
4481 || TREE_CODE (TREE_TYPE (ctor)) == QUAL_UNION_TYPE))
4484 bool clear_this = true;
4486 if (!VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (ctor)))
4488 /* We don't expect more than one element of the union to be
4489 initialized. Not sure what we should do otherwise... */
4490 gcc_assert (VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ctor))
4493 init_sub_type = TREE_TYPE (VEC_index (constructor_elt,
4494 CONSTRUCTOR_ELTS (ctor),
4497 /* ??? We could look at each element of the union, and find the
4498 largest element, which would avoid comparing the size of the
4499 initialized element against any tail padding in the union.
4500 Doesn't seem worth the effort... */
4501 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (ctor)),
4502 TYPE_SIZE (init_sub_type)) == 1)
4504 /* And now we have to find out if the element itself is fully
4505 constructed. E.g. for union { struct { int a, b; } s; } u
4506 = { .s = { .a = 1 } }. */
4507 if (elt_count == count_type_elements (init_sub_type, false))
4512 *p_must_clear = clear_this;
4515 *p_nz_elts += nz_elts;
4516 *p_nc_elts += nc_elts;
4517 *p_elt_count += elt_count;
4521 categorize_ctor_elements (tree ctor, HOST_WIDE_INT *p_nz_elts,
4522 HOST_WIDE_INT *p_nc_elts,
4523 HOST_WIDE_INT *p_elt_count,
4529 *p_must_clear = false;
4530 categorize_ctor_elements_1 (ctor, p_nz_elts, p_nc_elts, p_elt_count,
4534 /* Count the number of scalars in TYPE.  Return -1 on overflow or if the
4535 type is variable-sized.  If ALLOW_FLEXARR is true, don't count a flexible
4536 array member at the end of the structure.  */
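/* Examples (illustrative): "int a[4]" counts as 4 scalars,
   "struct { int x; double y; } b[3]" as 6, and a variable-length
   array yields -1.  */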
4539 count_type_elements (tree type, bool allow_flexarr)
4541 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
4542 switch (TREE_CODE (type))
4546 tree telts = array_type_nelts (type);
4547 if (telts && host_integerp (telts, 1))
4549 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
4550 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type), false);
4553 else if (max / n > m)
4561 HOST_WIDE_INT n = 0, t;
4564 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4565 if (TREE_CODE (f) == FIELD_DECL)
4567 t = count_type_elements (TREE_TYPE (f), false);
4570 /* Check for structures with flexible array member. */
4571 tree tf = TREE_TYPE (f);
4573 && TREE_CHAIN (f) == NULL
4574 && TREE_CODE (tf) == ARRAY_TYPE
4576 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
4577 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
4578 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
4579 && int_size_in_bytes (type) >= 0)
4591 case QUAL_UNION_TYPE:
4593 /* Ho hum. How in the world do we guess here? Clearly it isn't
4594 right to count the fields. Guess based on the number of words. */
4595 HOST_WIDE_INT n = int_size_in_bytes (type);
4598 return n / UNITS_PER_WORD;
4605 return TYPE_VECTOR_SUBPARTS (type);
4614 case REFERENCE_TYPE:
4626 /* Return 1 if EXP contains mostly (3/4) zeros. */
4629 mostly_zeros_p (tree exp)
4631 if (TREE_CODE (exp) == CONSTRUCTOR)
4634 HOST_WIDE_INT nz_elts, nc_elts, count, elts;
4637 categorize_ctor_elements (exp, &nz_elts, &nc_elts, &count, &must_clear);
4641 elts = count_type_elements (TREE_TYPE (exp), false);
4643 return nz_elts < elts / 4;
4646 return initializer_zerop (exp);
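/* Worked example (illustrative): for

	int v[8] = { 0, 0, 0, 0, 0, 0, 0, 1 };

   we get nz_elts == 1 and elts == 8; since 1 < 8 / 4, the initializer
   is considered mostly zeros.  */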
4649 /* Helper function for store_constructor.
4650 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4651 TYPE is the type of the CONSTRUCTOR, not the element type.
4652 CLEARED is as for store_constructor.
4653 ALIAS_SET is the alias set to use for any stores.
4655 This provides a recursive shortcut back to store_constructor when it isn't
4656 necessary to go through store_field. This is so that we can pass through
4657 the cleared field to let store_constructor know that we may not have to
4658 clear a substructure if the outer structure has already been cleared. */
4661 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4662 HOST_WIDE_INT bitpos, enum machine_mode mode,
4663 tree exp, tree type, int cleared, int alias_set)
4665 if (TREE_CODE (exp) == CONSTRUCTOR
4666 /* We can only call store_constructor recursively if the size and
4667 bit position are on a byte boundary. */
4668 && bitpos % BITS_PER_UNIT == 0
4669 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
4670 /* If we have a nonzero bitpos for a register target, then we just
4671 let store_field do the bitfield handling. This is unlikely to
4672 generate unnecessary clear instructions anyway.  */
4673 && (bitpos == 0 || MEM_P (target)))
4677 = adjust_address (target,
4678 GET_MODE (target) == BLKmode
4680 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4681 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4684 /* Update the alias set, if required. */
4685 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
4686 && MEM_ALIAS_SET (target) != 0)
4688 target = copy_rtx (target);
4689 set_mem_alias_set (target, alias_set);
4692 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4695 store_field (target, bitsize, bitpos, mode, exp, type, alias_set);
4698 /* Store the value of constructor EXP into the rtx TARGET.
4699 TARGET is either a REG or a MEM; we know it cannot conflict, since
4700 safe_from_p has been called.
4701 CLEARED is true if TARGET is known to have been zero'd.
4702 SIZE is the number of bytes of TARGET we are allowed to modify: this
4703 may not be the same as the size of EXP if we are assigning to a field
4704 which has been packed to exclude padding bits. */
4707 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4709 tree type = TREE_TYPE (exp);
4710 #ifdef WORD_REGISTER_OPERATIONS
4711 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4714 switch (TREE_CODE (type))
4718 case QUAL_UNION_TYPE:
4720 unsigned HOST_WIDE_INT idx;
4723 /* If size is zero or the target is already cleared, do nothing. */
4724 if (size == 0 || cleared)
4726 /* We either clear the aggregate or indicate the value is dead. */
4727 else if ((TREE_CODE (type) == UNION_TYPE
4728 || TREE_CODE (type) == QUAL_UNION_TYPE)
4729 && ! CONSTRUCTOR_ELTS (exp))
4730 /* If the constructor is empty, clear the union. */
4732 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
4736 /* If we are building a static constructor into a register,
4737 set the initial value as zero so we can fold the value into
4738 a constant. But if more than one register is involved,
4739 this probably loses. */
4740 else if (REG_P (target) && TREE_STATIC (exp)
4741 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4743 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4747 /* If the constructor has fewer fields than the structure or
4748 if we are initializing the structure to mostly zeros, clear
4749 the whole structure first. Don't do this if TARGET is a
4750 register whose mode size isn't equal to SIZE since
4751 clear_storage can't handle this case. */
4753 && (((int)VEC_length (constructor_elt, CONSTRUCTOR_ELTS (exp))
4754 != fields_length (type))
4755 || mostly_zeros_p (exp))
4757 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4760 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
4765 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4767 /* Store each element of the constructor into the
4768 corresponding field of TARGET. */
4769 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
4771 enum machine_mode mode;
4772 HOST_WIDE_INT bitsize;
4773 HOST_WIDE_INT bitpos = 0;
4775 rtx to_rtx = target;
4777 /* Just ignore missing fields. We cleared the whole
4778 structure, above, if any fields are missing. */
4782 if (cleared && initializer_zerop (value))
4785 if (host_integerp (DECL_SIZE (field), 1))
4786 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4790 mode = DECL_MODE (field);
4791 if (DECL_BIT_FIELD (field))
4794 offset = DECL_FIELD_OFFSET (field);
4795 if (host_integerp (offset, 0)
4796 && host_integerp (bit_position (field), 0))
4798 bitpos = int_bit_position (field);
4802 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4809 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
4810 make_tree (TREE_TYPE (exp),
4813 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4814 gcc_assert (MEM_P (to_rtx));
4816 #ifdef POINTERS_EXTEND_UNSIGNED
4817 if (GET_MODE (offset_rtx) != Pmode)
4818 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4820 if (GET_MODE (offset_rtx) != ptr_mode)
4821 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4824 to_rtx = offset_address (to_rtx, offset_rtx,
4825 highest_pow2_factor (offset));
4828 #ifdef WORD_REGISTER_OPERATIONS
4829 /* If this initializes a field that is smaller than a
4830 word, at the start of a word, try to widen it to a full
4831 word. This special case allows us to output C++ member
4832 function initializations in a form that the optimizers can understand.  */
4835 && bitsize < BITS_PER_WORD
4836 && bitpos % BITS_PER_WORD == 0
4837 && GET_MODE_CLASS (mode) == MODE_INT
4838 && TREE_CODE (value) == INTEGER_CST
4840 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4842 tree type = TREE_TYPE (value);
4844 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4846 type = lang_hooks.types.type_for_size
4847 (BITS_PER_WORD, TYPE_UNSIGNED (type));
4848 value = convert (type, value);
4851 if (BYTES_BIG_ENDIAN)
4853 = fold_build2 (LSHIFT_EXPR, type, value,
4854 build_int_cst (NULL_TREE,
4855 BITS_PER_WORD - bitsize));
4856 bitsize = BITS_PER_WORD;
4861 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4862 && DECL_NONADDRESSABLE_P (field))
4864 to_rtx = copy_rtx (to_rtx);
4865 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4868 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4869 value, type, cleared,
4870 get_alias_set (TREE_TYPE (field)));
4877 unsigned HOST_WIDE_INT i;
4880 tree elttype = TREE_TYPE (type);
4882 HOST_WIDE_INT minelt = 0;
4883 HOST_WIDE_INT maxelt = 0;
4885 domain = TYPE_DOMAIN (type);
4886 const_bounds_p = (TYPE_MIN_VALUE (domain)
4887 && TYPE_MAX_VALUE (domain)
4888 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4889 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4891 /* If we have constant bounds for the range of the type, get them. */
4894 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4895 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4898 /* If the constructor has fewer elements than the array, clear
4899 the whole array first.  Similarly if this is a static
4900 constructor of a non-BLKmode object. */
4903 else if (REG_P (target) && TREE_STATIC (exp))
4907 unsigned HOST_WIDE_INT idx;
4909 HOST_WIDE_INT count = 0, zero_count = 0;
4910 need_to_clear = ! const_bounds_p;
4912 /* This loop is a more accurate version of the loop in
4913 mostly_zeros_p (it handles RANGE_EXPR in an index). It
4914 is also needed to check for missing elements. */
4915 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
4917 HOST_WIDE_INT this_node_count;
4922 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4924 tree lo_index = TREE_OPERAND (index, 0);
4925 tree hi_index = TREE_OPERAND (index, 1);
4927 if (! host_integerp (lo_index, 1)
4928 || ! host_integerp (hi_index, 1))
4934 this_node_count = (tree_low_cst (hi_index, 1)
4935 - tree_low_cst (lo_index, 1) + 1);
4938 this_node_count = 1;
4940 count += this_node_count;
4941 if (mostly_zeros_p (value))
4942 zero_count += this_node_count;
4945 /* Clear the entire array first if there are any missing
4946 elements, or if the incidence of zero elements is >= 75%.  */
4949 && (count < maxelt - minelt + 1
4950 || 4 * zero_count >= 3 * count))
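/* Worked example of the heuristic above (illustrative): with COUNT == 10
   elements of which ZERO_COUNT == 8 are zero, 4 * 8 == 32 >= 3 * 10 == 30,
   so we clear the whole array first and store only the nonzero elements
   afterwards.  */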
4954 if (need_to_clear && size > 0)
4957 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4959 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
4963 if (!cleared && REG_P (target))
4964 /* Inform later passes that the old value is dead. */
4965 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4967 /* Store each element of the constructor into the
4968 corresponding element of TARGET, determined by counting the elements.  */
4970 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
4972 enum machine_mode mode;
4973 HOST_WIDE_INT bitsize;
4974 HOST_WIDE_INT bitpos;
4976 rtx xtarget = target;
4978 if (cleared && initializer_zerop (value))
4981 unsignedp = TYPE_UNSIGNED (elttype);
4982 mode = TYPE_MODE (elttype);
4983 if (mode == BLKmode)
4984 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4985 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4988 bitsize = GET_MODE_BITSIZE (mode);
4990 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4992 tree lo_index = TREE_OPERAND (index, 0);
4993 tree hi_index = TREE_OPERAND (index, 1);
4994 rtx index_r, pos_rtx;
4995 HOST_WIDE_INT lo, hi, count;
4998 /* If the range is constant and "small", unroll the loop. */
5000 && host_integerp (lo_index, 0)
5001 && host_integerp (hi_index, 0)
5002 && (lo = tree_low_cst (lo_index, 0),
5003 hi = tree_low_cst (hi_index, 0),
5004 count = hi - lo + 1,
5007 || (host_integerp (TYPE_SIZE (elttype), 1)
5008 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5011 lo -= minelt; hi -= minelt;
5012 for (; lo <= hi; lo++)
5014 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5017 && !MEM_KEEP_ALIAS_SET_P (target)
5018 && TREE_CODE (type) == ARRAY_TYPE
5019 && TYPE_NONALIASED_COMPONENT (type))
5021 target = copy_rtx (target);
5022 MEM_KEEP_ALIAS_SET_P (target) = 1;
5025 store_constructor_field
5026 (target, bitsize, bitpos, mode, value, type, cleared,
5027 get_alias_set (elttype));
5032 rtx loop_start = gen_label_rtx ();
5033 rtx loop_end = gen_label_rtx ();
5036 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
5037 unsignedp = TYPE_UNSIGNED (domain);
5039 index = build_decl (VAR_DECL, NULL_TREE, domain);
5042 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5044 SET_DECL_RTL (index, index_r);
5045 store_expr (lo_index, index_r, 0);
5047 /* Build the head of the loop. */
5048 do_pending_stack_adjust ();
5049 emit_label (loop_start);
5051 /* Assign value to element index. */
5053 = convert (ssizetype,
5054 fold_build2 (MINUS_EXPR, TREE_TYPE (index),
5055 index, TYPE_MIN_VALUE (domain)));
5056 position = size_binop (MULT_EXPR, position,
5058 TYPE_SIZE_UNIT (elttype)));
5060 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
5061 xtarget = offset_address (target, pos_rtx,
5062 highest_pow2_factor (position));
5063 xtarget = adjust_address (xtarget, mode, 0);
5064 if (TREE_CODE (value) == CONSTRUCTOR)
5065 store_constructor (value, xtarget, cleared,
5066 bitsize / BITS_PER_UNIT);
5068 store_expr (value, xtarget, 0);
5070 /* Generate a conditional jump to exit the loop. */
5071 exit_cond = build2 (LT_EXPR, integer_type_node,
5073 jumpif (exit_cond, loop_end);
5075 /* Update the loop counter, and jump to the head of the loop.  */
5077 expand_assignment (index,
5078 build2 (PLUS_EXPR, TREE_TYPE (index),
5079 index, integer_one_node));
5081 emit_jump (loop_start);
5083 /* Build the end of the loop. */
5084 emit_label (loop_end);
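/* Illustrative source-level picture of the loop built above: a GNU C
   range initializer such as "int a[100] = { [0 ... 99] = x };" with a
   large element count expands to roughly

	for (i = 0; i <= 99; i++)
	  a[i] = x;

   instead of 100 individual stores.  */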
5087 else if ((index != 0 && ! host_integerp (index, 0))
5088 || ! host_integerp (TYPE_SIZE (elttype), 1))
5093 index = ssize_int (1);
5096 index = fold_convert (ssizetype,
5097 fold_build2 (MINUS_EXPR,
5100 TYPE_MIN_VALUE (domain)));
5102 position = size_binop (MULT_EXPR, index,
5104 TYPE_SIZE_UNIT (elttype)));
5105 xtarget = offset_address (target,
5106 expand_expr (position, 0, VOIDmode, 0),
5107 highest_pow2_factor (position));
5108 xtarget = adjust_address (xtarget, mode, 0);
5109 store_expr (value, xtarget, 0);
5114 bitpos = ((tree_low_cst (index, 0) - minelt)
5115 * tree_low_cst (TYPE_SIZE (elttype), 1));
5117 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5119 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
5120 && TREE_CODE (type) == ARRAY_TYPE
5121 && TYPE_NONALIASED_COMPONENT (type))
5123 target = copy_rtx (target);
5124 MEM_KEEP_ALIAS_SET_P (target) = 1;
5126 store_constructor_field (target, bitsize, bitpos, mode, value,
5127 type, cleared, get_alias_set (elttype));
5135 unsigned HOST_WIDE_INT idx;
5136 constructor_elt *ce;
5140 tree elttype = TREE_TYPE (type);
5141 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
5142 enum machine_mode eltmode = TYPE_MODE (elttype);
5143 HOST_WIDE_INT bitsize;
5144 HOST_WIDE_INT bitpos;
5145 rtvec vector = NULL;
5148 gcc_assert (eltmode != BLKmode);
5150 n_elts = TYPE_VECTOR_SUBPARTS (type);
5151 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
5153 enum machine_mode mode = GET_MODE (target);
5155 icode = (int) vec_init_optab->handlers[mode].insn_code;
5156 if (icode != CODE_FOR_nothing)
5160 vector = rtvec_alloc (n_elts);
5161 for (i = 0; i < n_elts; i++)
5162 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
5166 /* If the constructor has fewer elements than the vector,
5167 clear the whole vector first.  Similarly if this is a static
5168 constructor of a non-BLKmode object. */
5171 else if (REG_P (target) && TREE_STATIC (exp))
5175 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
5178 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
5180 int n_elts_here = tree_low_cst
5181 (int_const_binop (TRUNC_DIV_EXPR,
5182 TYPE_SIZE (TREE_TYPE (value)),
5183 TYPE_SIZE (elttype), 0), 1);
5185 count += n_elts_here;
5186 if (mostly_zeros_p (value))
5187 zero_count += n_elts_here;
5190 /* Clear the entire vector first if there are any missing elements,
5191 or if the incidence of zero elements is >= 75%. */
5192 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
5195 if (need_to_clear && size > 0 && !vector)
5198 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5200 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5204 /* Inform later passes that the old value is dead. */
5205 if (!cleared && REG_P (target))
5206 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5208 /* Store each element of the constructor into the corresponding
5209 element of TARGET, determined by counting the elements. */
5210 for (idx = 0, i = 0;
5211 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
5212 idx++, i += bitsize / elt_size)
5214 HOST_WIDE_INT eltpos;
5215 tree value = ce->value;
5217 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
5218 if (cleared && initializer_zerop (value))
5222 eltpos = tree_low_cst (ce->index, 1);
5228 /* Vector CONSTRUCTORs should only be built from smaller
5229 vectors in the case of BLKmode vectors. */
5230 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
5231 RTVEC_ELT (vector, eltpos)
5232 = expand_expr (value, NULL_RTX, VOIDmode, 0);
5236 enum machine_mode value_mode =
5237 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
5238 ? TYPE_MODE (TREE_TYPE (value))
5240 bitpos = eltpos * elt_size;
5241 store_constructor_field (target, bitsize, bitpos,
5242 value_mode, value, type,
5243 cleared, get_alias_set (elttype));
5248 emit_insn (GEN_FCN (icode)
5250 gen_rtx_PARALLEL (GET_MODE (target), vector)));
5259 /* Store the value of EXP (an expression tree)
5260 into a subfield of TARGET which has mode MODE and occupies
5261 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5262 If MODE is VOIDmode, it means that we are storing into a bit-field.
5264 Always return const0_rtx unless we have something particular to return.
5267 TYPE is the type of the underlying object,
5269 ALIAS_SET is the alias set for the destination. This value will
5270 (in general) be different from that for TARGET, since TARGET is a
5271 reference to the containing structure. */
5274 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5275 enum machine_mode mode, tree exp, tree type, int alias_set)
5277 HOST_WIDE_INT width_mask = 0;
5279 if (TREE_CODE (exp) == ERROR_MARK)
5282 /* If we have nothing to store, do nothing unless the expression has side-effects.  */
5285 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5286 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5287 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5289 /* If we are storing into an unaligned field of an aligned union that is
5290 in a register, we may have the mode of TARGET being an integer mode but
5291 MODE == BLKmode. In that case, get an aligned object whose size and
5292 alignment are the same as TARGET and store TARGET into it (we can avoid
5293 the store if the field being stored is the entire width of TARGET). Then
5294 call ourselves recursively to store the field into a BLKmode version of
5295 that object. Finally, load from the object into TARGET. This is not
5296 very efficient in general, but should only be slightly more expensive
5297 than the otherwise-required unaligned accesses. Perhaps this can be
5298 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5299 twice, once with emit_move_insn and once via store_field. */
5302 && (REG_P (target) || GET_CODE (target) == SUBREG))
5304 rtx object = assign_temp (type, 0, 1, 1);
5305 rtx blk_object = adjust_address (object, BLKmode, 0);
5307 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5308 emit_move_insn (object, target);
5310 store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set);
5312 emit_move_insn (target, object);
5314 /* We want to return the BLKmode version of the data.  */
return blk_object;
5318 if (GET_CODE (target) == CONCAT)
5320 /* We're storing into a struct containing a single __complex. */
5322 gcc_assert (!bitpos);
5323 return store_expr (exp, target, 0);
5326 /* If the structure is in a register or if the component
5327 is a bit field, we cannot use addressing to access it.
5328 Use bit-field techniques or SUBREG to store in it. */
5330 if (mode == VOIDmode
5331 || (mode != BLKmode && ! direct_store[(int) mode]
5332 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5333 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5335 || GET_CODE (target) == SUBREG
5336 /* If the field isn't aligned enough to store as an ordinary memref,
5337 store it as a bit field. */
5339 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5340 || bitpos % GET_MODE_ALIGNMENT (mode))
5341 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5342 || (bitpos % BITS_PER_UNIT != 0)))
5343 /* If the RHS and field are a constant size and the size of the
5344 RHS isn't the same size as the bitfield, we must use bitfield operations.  */
5347 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5348 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5352 /* If EXP is a NOP_EXPR of precision less than its mode, then that
5353 implies a mask operation. If the precision is the same size as
5354 the field we're storing into, that mask is redundant. This is
5355 particularly common with bit field assignments generated by the C++ front end.  */
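/* Example (illustrative): for "struct { unsigned f : 24; } s;" the front
   end may express "s.f = x" as a store of a NOP_EXPR converting X to a
   24-bit type.  Since the destination field is 24 bits wide anyway, the
   masking implied by that conversion is redundant and we can store from
   X directly.  */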
5357 if (TREE_CODE (exp) == NOP_EXPR)
5359 tree type = TREE_TYPE (exp);
5360 if (INTEGRAL_TYPE_P (type)
5361 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
5362 && bitsize == TYPE_PRECISION (type))
5364 type = TREE_TYPE (TREE_OPERAND (exp, 0));
5365 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
5366 exp = TREE_OPERAND (exp, 0);
5370 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5372 /* If BITSIZE is narrower than the size of the type of EXP
5373 we will be narrowing TEMP. Normally, what's wanted are the
5374 low-order bits.  However, if EXP's type is a record and this is a
5375 big-endian machine, we want the upper BITSIZE bits. */
5376 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5377 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5378 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5379 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5380 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5384 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE.  */
5386 if (mode != VOIDmode && mode != BLKmode
5387 && mode != TYPE_MODE (TREE_TYPE (exp)))
5388 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5390 /* If the modes of TARGET and TEMP are both BLKmode, both
5391 must be in memory and BITPOS must be aligned on a byte
5392 boundary. If so, we simply do a block copy. */
5393 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5395 gcc_assert (MEM_P (target) && MEM_P (temp)
5396 && !(bitpos % BITS_PER_UNIT));
5398 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5399 emit_block_move (target, temp,
5400 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5407 /* Store the value in the bitfield. */
5408 store_bit_field (target, bitsize, bitpos, mode, temp);
5414 /* Now build a reference to just the desired component. */
5415 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5417 if (to_rtx == target)
5418 to_rtx = copy_rtx (to_rtx);
5420 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5421 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5422 set_mem_alias_set (to_rtx, alias_set);
5424 return store_expr (exp, to_rtx, 0);
5428 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5429 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5430 codes and find the ultimate containing object, which we return.
5432 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5433 bit position, and *PUNSIGNEDP to the signedness of the field.
5434 If the position of the field is variable, we store a tree
5435 giving the variable offset (in units) in *POFFSET.
5436 This offset is in addition to the bit position.
5437 If the position is not variable, we store 0 in *POFFSET.
5439 If any of the extraction expressions is volatile,
5440 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5442 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5443 is a mode that can be used to access the field.  In that case, *PBITSIZE is redundant.
5446 If the field describes a variable-sized object, *PMODE is set to
5447 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5448 this case, but the address of the object can be found.
5450 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
5451 look through nodes that serve as markers of a greater alignment than
5452 the one that can be deduced from the expression. These nodes make it
5453 possible for front-ends to prevent temporaries from being created by
5454 the middle-end on alignment considerations. For that purpose, the
5455 normal operating mode at high-level is to always pass FALSE so that
5456 the ultimate containing object is really returned; moreover, the
5457 associated predicate handled_component_p will always return TRUE
5458 on these nodes, thus indicating that they are essentially handled
5459 by get_inner_reference. TRUE should only be passed when the caller
5460 is scanning the expression in order to build another representation
5461 and specifically knows how to handle these nodes; as such, this is
5462 the normal operating mode in the RTL expanders. */
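/* Illustrative call (all locals hypothetical):

	HOST_WIDE_INT bitsize, bitpos;
	tree offset;
	enum machine_mode mode;
	int unsignedp, volatilep = 0;
	tree base = get_inner_reference (exp, &bitsize, &bitpos, &offset,
					 &mode, &unsignedp, &volatilep,
					 false);

   For an EXP of "s.f", BASE is the tree for "s", *PBITPOS is the
   constant bit offset of F, and *POFFSET is zero.  */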
5465 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5466 HOST_WIDE_INT *pbitpos, tree *poffset,
5467 enum machine_mode *pmode, int *punsignedp,
5468 int *pvolatilep, bool keep_aligning)
5471 enum machine_mode mode = VOIDmode;
5472 tree offset = size_zero_node;
5473 tree bit_offset = bitsize_zero_node;
5476 /* First get the mode, signedness, and size. We do this from just the
5477 outermost expression. */
5478 if (TREE_CODE (exp) == COMPONENT_REF)
5480 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5481 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5482 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5484 *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
5486 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5488 size_tree = TREE_OPERAND (exp, 1);
5489 *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
5493 mode = TYPE_MODE (TREE_TYPE (exp));
5494 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5496 if (mode == BLKmode)
5497 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5499 *pbitsize = GET_MODE_BITSIZE (mode);
5504 if (! host_integerp (size_tree, 1))
5505 mode = BLKmode, *pbitsize = -1;
5507 *pbitsize = tree_low_cst (size_tree, 1);
5510 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5511 and find the ultimate containing object. */
5514 switch (TREE_CODE (exp))
5517 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5518 TREE_OPERAND (exp, 2));
5523 tree field = TREE_OPERAND (exp, 1);
5524 tree this_offset = component_ref_field_offset (exp);
5526 /* If this field hasn't been filled in yet, don't go past it.
5527 This should only happen when folding expressions made during
5528 type construction. */
5529 if (this_offset == 0)
5532 offset = size_binop (PLUS_EXPR, offset, this_offset);
5533 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5534 DECL_FIELD_BIT_OFFSET (field));
5536 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5541 case ARRAY_RANGE_REF:
5543 tree index = TREE_OPERAND (exp, 1);
5544 tree low_bound = array_ref_low_bound (exp);
5545 tree unit_size = array_ref_element_size (exp);
5547 /* We assume all arrays have sizes that are a multiple of a byte.
5548 First subtract the lower bound, if any, in the type of the
5549 index, then convert to sizetype and multiply by the size of
5550 the array element. */
5551 if (! integer_zerop (low_bound))
5552 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
5555 offset = size_binop (PLUS_EXPR, offset,
5556 size_binop (MULT_EXPR,
5557 convert (sizetype, index),
5566 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5567 bitsize_int (*pbitsize));
5570 case VIEW_CONVERT_EXPR:
5571 if (keep_aligning && STRICT_ALIGNMENT
5572 && (TYPE_ALIGN (TREE_TYPE (exp))
5573 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5574 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5575 < BIGGEST_ALIGNMENT)
5576 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5577 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5585 /* If any reference in the chain is volatile, the effect is volatile. */
5586 if (TREE_THIS_VOLATILE (exp))
5589 exp = TREE_OPERAND (exp, 0);
5593 /* If OFFSET is constant, see if we can return the whole thing as a
5594 constant bit position. Otherwise, split it up. */
5595 if (host_integerp (offset, 0)
5596 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5598 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5599 && host_integerp (tem, 0))
5600 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5602 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5608 /* Return a tree of sizetype representing the size, in bytes, of the element
5609 of EXP, an ARRAY_REF. */
5612 array_ref_element_size (tree exp)
5614 tree aligned_size = TREE_OPERAND (exp, 3);
5615 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5617 /* If a size was specified in the ARRAY_REF, it's the size measured
5618 in alignment units of the element type. So multiply by that value. */
5621 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5622 sizetype from another type of the same width and signedness. */
5623 if (TREE_TYPE (aligned_size) != sizetype)
5624 aligned_size = fold_convert (sizetype, aligned_size);
5625 return size_binop (MULT_EXPR, aligned_size,
5626 size_int (TYPE_ALIGN_UNIT (elmt_type)));
5629 /* Otherwise, take the size from that of the element type. Substitute
5630 any PLACEHOLDER_EXPR that we have. */
5632 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
5635 /* Return a tree representing the lower bound of the array mentioned in
5636 EXP, an ARRAY_REF. */
5639 array_ref_low_bound (tree exp)
5641 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5643 /* If a lower bound is specified in EXP, use it. */
5644 if (TREE_OPERAND (exp, 2))
5645 return TREE_OPERAND (exp, 2);
5647 /* Otherwise, if there is a domain type and it has a lower bound, use it,
5648 substituting for a PLACEHOLDER_EXPR as needed. */
5649 if (domain_type && TYPE_MIN_VALUE (domain_type))
5650 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
5652 /* Otherwise, return a zero of the appropriate type. */
5653 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
5656 /* Return a tree representing the upper bound of the array mentioned in
5657 EXP, an ARRAY_REF. */
5660 array_ref_up_bound (tree exp)
5662 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5664 /* If there is a domain type and it has an upper bound, use it, substituting
5665 for a PLACEHOLDER_EXPR as needed. */
5666 if (domain_type && TYPE_MAX_VALUE (domain_type))
5667 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
5669 /* Otherwise fail. */
5673 /* Return a tree representing the offset, in bytes, of the field referenced
5674 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
5677 component_ref_field_offset (tree exp)
5679 tree aligned_offset = TREE_OPERAND (exp, 2);
5680 tree field = TREE_OPERAND (exp, 1);
5682 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
5683 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT.  So multiply by that amount.  */
5687 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5688 sizetype from another type of the same width and signedness. */
5689 if (TREE_TYPE (aligned_offset) != sizetype)
5690 aligned_offset = fold_convert (sizetype, aligned_offset);
5691 return size_binop (MULT_EXPR, aligned_offset,
5692 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
5695 /* Otherwise, take the offset from that of the field. Substitute
5696 any PLACEHOLDER_EXPR that we have. */
5698 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
5701 /* Return 1 if T is an expression that get_inner_reference handles. */
5704 handled_component_p (tree t)
5706 switch (TREE_CODE (t))
5711 case ARRAY_RANGE_REF:
5712 case VIEW_CONVERT_EXPR:
5722 /* Given an rtx VALUE that may contain additions and multiplications, return
5723 an equivalent value that just refers to a register, memory, or constant.
5724 This is done by generating instructions to perform the arithmetic and
5725 returning a pseudo-register containing the value.
5727 The returned value may be a REG, SUBREG, MEM or constant. */
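/* Illustrative only: a caller holding the sum "(plus:SI x y)" can do

	rtx sum = force_operand (gen_rtx_PLUS (SImode, x, y), NULL_RTX);

   which emits the addition and returns a register (or simpler rtx)
   holding the result.  */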
5730 force_operand (rtx value, rtx target)
5733 /* Use subtarget as the target for operand 0 of a binary operation. */
5734 rtx subtarget = get_subtarget (target);
5735 enum rtx_code code = GET_CODE (value);
5737 /* Check for subreg applied to an expression produced by the loop optimizer.  */
5739 && !REG_P (SUBREG_REG (value))
5740 && !MEM_P (SUBREG_REG (value)))
5742 value = simplify_gen_subreg (GET_MODE (value),
5743 force_reg (GET_MODE (SUBREG_REG (value)),
5744 force_operand (SUBREG_REG (value),
5746 GET_MODE (SUBREG_REG (value)),
5747 SUBREG_BYTE (value));
5748 code = GET_CODE (value);
5751 /* Check for a PIC address load. */
5752 if ((code == PLUS || code == MINUS)
5753 && XEXP (value, 0) == pic_offset_table_rtx
5754 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5755 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5756 || GET_CODE (XEXP (value, 1)) == CONST))
5759 subtarget = gen_reg_rtx (GET_MODE (value));
5760 emit_move_insn (subtarget, value);
5764 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5767 target = gen_reg_rtx (GET_MODE (value));
5768 convert_move (target, force_operand (XEXP (value, 0), NULL),
5769 code == ZERO_EXTEND);
5773 if (ARITHMETIC_P (value))
5775 op2 = XEXP (value, 1);
5776 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
5778 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5781 op2 = negate_rtx (GET_MODE (value), op2);
5784 /* Check for an addition with OP2 a constant integer and our first
5785 operand a PLUS of a virtual register and something else. In that
5786 case, we want to emit the sum of the virtual register and the
5787 constant first and then add the other value. This allows virtual
5788 register instantiation to simply modify the constant rather than
5789 creating another one around this addition. */
5790 if (code == PLUS && GET_CODE (op2) == CONST_INT
5791 && GET_CODE (XEXP (value, 0)) == PLUS
5792 && REG_P (XEXP (XEXP (value, 0), 0))
5793 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5794 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5796 rtx temp = expand_simple_binop (GET_MODE (value), code,
5797 XEXP (XEXP (value, 0), 0), op2,
5798 subtarget, 0, OPTAB_LIB_WIDEN);
5799 return expand_simple_binop (GET_MODE (value), code, temp,
5800 force_operand (XEXP (XEXP (value,
5802 target, 0, OPTAB_LIB_WIDEN);
5805 op1 = force_operand (XEXP (value, 0), subtarget);
5806 op2 = force_operand (op2, NULL_RTX);
5810 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5812 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5813 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5814 target, 1, OPTAB_LIB_WIDEN);
5816 return expand_divmod (0,
5817 FLOAT_MODE_P (GET_MODE (value))
5818 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5819 GET_MODE (value), op1, op2, target, 0);
5822 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5826 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5830 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5834 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5835 target, 0, OPTAB_LIB_WIDEN);
5838 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5839 target, 1, OPTAB_LIB_WIDEN);
5842 if (UNARY_P (value))
5844 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5845 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5848 #ifdef INSN_SCHEDULING
5849 /* On machines that have insn scheduling, we want all memory references to be
5850 explicit, so we need to deal with such paradoxical SUBREGs. */
5851 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
5852 && (GET_MODE_SIZE (GET_MODE (value))
5853 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5855 = simplify_gen_subreg (GET_MODE (value),
5856 force_reg (GET_MODE (SUBREG_REG (value)),
5857 force_operand (SUBREG_REG (value),
5859 GET_MODE (SUBREG_REG (value)),
5860 SUBREG_BYTE (value));
5866 /* Subroutine of expand_expr: return nonzero iff there is no way that
5867 EXP can reference X, which is being modified. TOP_P is nonzero if this
5868 call is going to be used to determine whether we need a temporary
5869 for EXP, as opposed to a recursive call to this function.
5871 It is always safe for this routine to return zero since it merely
5872 searches for optimization opportunities. */
5875 safe_from_p (rtx x, tree exp, int top_p)
5881 /* If EXP has varying size, we MUST use a target since we currently
5882 have no way of allocating temporaries of variable size
5883 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5884 So we assume here that something at a higher level has prevented a
5885 clash. This is somewhat bogus, but the best we can do. Only
5886 do this when X is BLKmode and when we are at the top level. */
5887 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5888 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5889 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5890 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5891 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5893 && GET_MODE (x) == BLKmode)
5894 /* If X is in the outgoing argument area, it is always safe. */
5896 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5897 || (GET_CODE (XEXP (x, 0)) == PLUS
5898 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5901 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5902 find the underlying pseudo. */
5903 if (GET_CODE (x) == SUBREG)
5906 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
5910 /* Now look at our tree code and possibly recurse. */
5911 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5913 case tcc_declaration:
5914 exp_rtl = DECL_RTL_IF_SET (exp);
5920 case tcc_exceptional:
5921 if (TREE_CODE (exp) == TREE_LIST)
5925 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
5927 exp = TREE_CHAIN (exp);
5930 if (TREE_CODE (exp) != TREE_LIST)
5931 return safe_from_p (x, exp, 0);
5934 else if (TREE_CODE (exp) == ERROR_MARK)
5935 return 1; /* An already-visited SAVE_EXPR? */
5940 /* The only case we look at here is the DECL_INITIAL inside a DECL_EXPR.  */
5942 return (TREE_CODE (exp) != DECL_EXPR
5943 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
5944 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
5945 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
5948 case tcc_comparison:
5949 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
5954 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5956 case tcc_expression:
5958 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5959 the expression. If it is set, we conflict iff we are that rtx or
5960 both are in memory. Otherwise, we check all operands of the
5961 expression recursively. */
5963 switch (TREE_CODE (exp))
5966 /* If the operand is static or we are static, we can't conflict.
5967 Likewise if we don't conflict with the operand at all. */
5968 if (staticp (TREE_OPERAND (exp, 0))
5969 || TREE_STATIC (exp)
5970 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5973 /* Otherwise, the only way this can conflict is if we are taking the
5974 address of a DECL whose address is part of X, which is very rare.  */
5976 exp = TREE_OPERAND (exp, 0);
5979 if (!DECL_RTL_SET_P (exp)
5980 || !MEM_P (DECL_RTL (exp)))
5983 exp_rtl = XEXP (DECL_RTL (exp), 0);
5987 case MISALIGNED_INDIRECT_REF:
5988 case ALIGN_INDIRECT_REF:
5991 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5992 get_alias_set (exp)))
5997 /* Assume that the call will clobber all hard registers and all of memory.  */
5999 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6004 case WITH_CLEANUP_EXPR:
6005 case CLEANUP_POINT_EXPR:
6006 /* Lowered by gimplify.c. */
6010 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6016 /* If we have an rtx, we do not need to scan our operands. */
6020 nops = TREE_CODE_LENGTH (TREE_CODE (exp));
6021 for (i = 0; i < nops; i++)
6022 if (TREE_OPERAND (exp, i) != 0
6023 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6026 /* If this is a language-specific tree code, it may require
6027 special handling. */
6028 if ((unsigned int) TREE_CODE (exp)
6029 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
6030 && !lang_hooks.safe_from_p (x, exp))
6035 /* Should never get a type here. */
6039 /* If we have an rtl, find any enclosed object.  Then see if we conflict with it.  */
6043 if (GET_CODE (exp_rtl) == SUBREG)
6045 exp_rtl = SUBREG_REG (exp_rtl);
6047 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6051 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6052 are memory and they conflict. */
6053 return ! (rtx_equal_p (x, exp_rtl)
6054 || (MEM_P (x) && MEM_P (exp_rtl)
6055 && true_dependence (exp_rtl, VOIDmode, x,
6056 rtx_addr_varies_p)));
6059 /* If we reach here, it is safe. */
6064 /* Return the highest power of two that EXP is known to be a multiple of.
6065 This is used in updating alignment of MEMs in array references. */
6067 unsigned HOST_WIDE_INT
6068 highest_pow2_factor (tree exp)
6070 unsigned HOST_WIDE_INT c0, c1;
6072 switch (TREE_CODE (exp))
6075 /* We can find the lowest bit that's a one. If the low
6076 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6077 We need to handle this case since we can find it in a COND_EXPR,
6078 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6079 erroneous program, so return BIGGEST_ALIGNMENT to avoid any later ICE.  */
6081 if (TREE_CONSTANT_OVERFLOW (exp))
6082 return BIGGEST_ALIGNMENT;
6085 /* Note: tree_low_cst is intentionally not used here;
6086 we don't care about the upper bits.  */
6087 c0 = TREE_INT_CST_LOW (exp);
c0 &= -c0;
6089 return c0 ? c0 : BIGGEST_ALIGNMENT;
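/* Worked example (illustrative): for the constant 24 (binary 11000),
   c0 & -c0 isolates the lowest set bit and yields 8, so any multiple
   of 24 is known to be aligned to at least 8.  */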
6093 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6094 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6095 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6096 return MIN (c0, c1);
6099 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6100 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6103 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6105 if (integer_pow2p (TREE_OPERAND (exp, 1))
6106 && host_integerp (TREE_OPERAND (exp, 1), 1))
6108 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6109 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6110 return MAX (1, c0 / c1);
6114 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6116 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6119 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6122 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6123 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6124 return MIN (c0, c1);
6133 /* Similar, except that the alignment requirements of TARGET are
6134 taken into account. Assume it is at least as aligned as its
6135 type, unless it is a COMPONENT_REF in which case the layout of
6136 the structure gives the alignment. */
6138 static unsigned HOST_WIDE_INT
6139 highest_pow2_factor_for_target (tree target, tree exp)
6141 unsigned HOST_WIDE_INT target_align, factor;
6143 factor = highest_pow2_factor (exp);
6144 if (TREE_CODE (target) == COMPONENT_REF)
6145 target_align = DECL_ALIGN_UNIT (TREE_OPERAND (target, 1));
6147 target_align = TYPE_ALIGN_UNIT (TREE_TYPE (target));
6148 return MAX (factor, target_align);
6151 /* Expands variable VAR. */
6154 expand_var (tree var)
6156 if (DECL_EXTERNAL (var))
6159 if (TREE_STATIC (var))
6160 /* If this is an inlined copy of a static local variable,
6161 look up the original decl. */
6162 var = DECL_ORIGIN (var);
6164 if (TREE_STATIC (var)
6165 ? !TREE_ASM_WRITTEN (var)
6166 : !DECL_RTL_SET_P (var))
6168 if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
6169 /* Should be ignored. */;
6170 else if (lang_hooks.expand_decl (var))
6172 else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
6174 else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
6175 rest_of_decl_compilation (var, 0, 0);
6177 /* No expansion needed. */
6178 gcc_assert (TREE_CODE (var) == TYPE_DECL
6179 || TREE_CODE (var) == CONST_DECL
6180 || TREE_CODE (var) == FUNCTION_DECL
6181 || TREE_CODE (var) == LABEL_DECL);
6185 /* Subroutine of expand_expr. Expand the two operands of a binary
6186 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6187 The value may be stored in TARGET if TARGET is nonzero. The
6188 MODIFIER argument is as documented by expand_expr. */
void
expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
		 enum expand_modifier modifier)
{
  if (! safe_from_p (target, exp1, 1))
    target = 0;

  if (operand_equal_p (exp0, exp1, 0))
    {
      *op0 = expand_expr (exp0, target, VOIDmode, modifier);
      *op1 = copy_rtx (*op0);
    }
  else
    {
      /* If we need to preserve evaluation order, copy exp0 into its own
	 temporary variable so that it can't be clobbered by exp1.  */
      if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
	exp0 = save_expr (exp0);

      *op0 = expand_expr (exp0, target, VOIDmode, modifier);
      *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
    }
}
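/* Typical calling pattern, as in the binary-operator cases of
   expand_expr_real_1 below: evaluate both operands with a shared
   subtarget hint, then combine them.  A sketch of the existing usage,
   not new code:  */
#if 0
  expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		   subtarget, &op0, &op1, 0);
  temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
		       OPTAB_LIB_WIDEN);
#endif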
6213 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
6214 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
static rtx
expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
			 enum expand_modifier modifier)
{
  rtx result, subtarget;
  tree inner, offset;
6222 HOST_WIDE_INT bitsize, bitpos;
6223 int volatilep, unsignedp;
6224 enum machine_mode mode1;
6226 /* If we are taking the address of a constant and are at the top level,
     we have to use output_constant_def since we can't call force_const_mem
     yet.  */
6229 /* ??? This should be considered a front-end bug. We should not be
6230 generating ADDR_EXPR of something that isn't an LVALUE. The only
6231 exception here is STRING_CST. */
6232 if (TREE_CODE (exp) == CONSTRUCTOR
6233 || CONSTANT_CLASS_P (exp))
6234 return XEXP (output_constant_def (exp, 0), 0);
6236 /* Everything must be something allowed by is_gimple_addressable. */
  switch (TREE_CODE (exp))
    {
    case INDIRECT_REF:
      /* This case will happen via recursion for &a->b.  */
      return expand_expr (TREE_OPERAND (exp, 0), target, tmode, EXPAND_NORMAL);

    case CONST_DECL:
      /* Recurse and make the output_constant_def clause above handle this.  */
      return expand_expr_addr_expr_1 (DECL_INITIAL (exp), target,
				      tmode, modifier);
    case REALPART_EXPR:
      /* The real part of the complex number is always first, therefore
	 the address is the same as the address of the parent object.  */
      offset = 0;
      bitpos = 0;
      inner = TREE_OPERAND (exp, 0);
      break;

    case IMAGPART_EXPR:
      /* The imaginary part of the complex number is always second.
	 The expression is therefore always offset by the size of the
	 scalar type.  */
      offset = 0;
      bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
      inner = TREE_OPERAND (exp, 0);
      break;
    default:
      /* If the object is a DECL, then expand it for its rtl.  Don't bypass
	 expand_expr, as that can have various side effects; LABEL_DECLs for
	 example, may not have their DECL_RTL set yet.  Assume language
	 specific tree nodes can be expanded in some interesting way.  */
      if (DECL_P (exp)
	  || TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE)
	{
6273 result = expand_expr (exp, target, tmode,
6274 modifier == EXPAND_INITIALIZER
6275 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
6277 /* If the DECL isn't in memory, then the DECL wasn't properly
6278 marked TREE_ADDRESSABLE, which will be either a front-end
6279 or a tree optimizer bug. */
6280 gcc_assert (MEM_P (result));
6281 result = XEXP (result, 0);
6283 /* ??? Is this needed anymore? */
	  if (DECL_P (exp) && ! TREE_USED (exp))
6286 assemble_external (exp);
6287 TREE_USED (exp) = 1;
6290 if (modifier != EXPAND_INITIALIZER
6291 && modifier != EXPAND_CONST_ADDRESS)
	    result = force_operand (result, target);
	  return result;
	}
6296 /* Pass FALSE as the last argument to get_inner_reference although
6297 we are expanding to RTL. The rationale is that we know how to
6298 handle "aligning nodes" here: we can just bypass them because
6299 they won't change the final object whose address will be returned
6300 (they actually exist only for that purpose). */
6301 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
				   &mode1, &unsignedp, &volatilep, false);
      break;
    }
6306 /* We must have made progress. */
6307 gcc_assert (inner != exp);
6309 subtarget = offset || bitpos ? NULL_RTX : target;
6310 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier);
  if (offset)
    {
      rtx tmp;

      if (modifier != EXPAND_NORMAL)
6317 result = force_operand (result, NULL);
6318 tmp = expand_expr (offset, NULL, tmode, EXPAND_NORMAL);
6320 result = convert_memory_address (tmode, result);
6321 tmp = convert_memory_address (tmode, tmp);
6323 if (modifier == EXPAND_SUM)
6324 result = gen_rtx_PLUS (tmode, result, tmp);
      else
	{
	  subtarget = bitpos ? NULL_RTX : target;
6328 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
6329 1, OPTAB_LIB_WIDEN);
	}
    }

  if (bitpos)
    {
      /* Someone beforehand should have rejected taking the address
	 of such an object.  */
6337 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
6339 result = plus_constant (result, bitpos / BITS_PER_UNIT);
6340 if (modifier < EXPAND_SUM)
	result = force_operand (result, target);
    }

  return result;
}
6347 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
6348 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
static rtx
expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
		       enum expand_modifier modifier)
{
  enum machine_mode rmode;
  rtx result;
6357 /* Target mode of VOIDmode says "whatever's natural". */
6358 if (tmode == VOIDmode)
6359 tmode = TYPE_MODE (TREE_TYPE (exp));
6361 /* We can get called with some Weird Things if the user does silliness
6362 like "(short) &a". In that case, convert_memory_address won't do
6363 the right thing, so ignore the given target mode. */
  if (tmode != Pmode && tmode != ptr_mode)
    tmode = Pmode;

  result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
				    tmode, modifier);
  /* Despite expand_expr's claims about ignoring TMODE when not
     strictly convenient, things break if we don't honor it.  Note
     that combined with the above, we only do this for pointer modes.  */
6373 rmode = GET_MODE (result);
  if (rmode == VOIDmode)
    rmode = tmode;
  if (rmode != tmode)
    result = convert_memory_address (tmode, result);

  return result;
}
6383 /* expand_expr: generate code for computing expression EXP.
6384 An rtx for the computed value is returned. The value is never null.
6385 In the case of a void EXP, const0_rtx is returned.
6387 The value may be stored in TARGET if TARGET is nonzero.
6388 TARGET is just a suggestion; callers must assume that
6389 the rtx returned may not be the same as TARGET.
6391 If TARGET is CONST0_RTX, it means that the value will be ignored.
6393 If TMODE is not VOIDmode, it suggests generating the
6394 result in mode TMODE. But this is done only when convenient.
   Otherwise, TMODE is ignored and the value is generated in its natural mode.
6396 TMODE is just a suggestion; callers must assume that
6397 the rtx returned may not have mode TMODE.
6399 Note that TARGET may have neither TMODE nor MODE. In that case, it
6400 probably will not be used.
6402 If MODIFIER is EXPAND_SUM then when EXP is an addition
6403 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6404 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6405 products as above, or REG or MEM, or constant.
6406 Ordinarily in such cases we would output mul or add instructions
6407 and then return a pseudo reg containing the sum.
6409 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6410 it also marks a label as absolutely required (it can't be dead).
6411 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6412 This is used for outputting expressions used in initializers.
6414 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6415 with a constant address even if that address is not normally legitimate.
6416 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6418 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6419 a call parameter. Such targets require special care as we haven't yet
6420 marked TARGET so that it's safe from being trashed by libcalls. We
   don't want to use TARGET for anything but the final result;
   intermediate values must go elsewhere.  Additionally, calls to
6423 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6425 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6426 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6427 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
   COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
   recursively.  */
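/* For instance, under EXPAND_SUM the expression "a[i * 4 + 12]" may
   come back as an address-shaped rtx such as

     (plus (mult (reg i) (const_int 4))
	   (plus (reg a) (const_int 12)))

   which the caller folds into a memory address instead of forcing each
   term into a fresh pseudo.  Illustrative shape only.  */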
6431 static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
6432 enum expand_modifier, rtx *);
rtx
expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
		  enum expand_modifier modifier, rtx *alt_rtl)
{
  int rn = -1;
  rtx ret, last = NULL;
6441 /* Handle ERROR_MARK before anybody tries to access its type. */
6442 if (TREE_CODE (exp) == ERROR_MARK
6443 || TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK)
6445 ret = CONST0_RTX (tmode);
6446 return ret ? ret : const0_rtx;
  if (flag_non_call_exceptions)
    {
      rn = lookup_stmt_eh_region (exp);
      /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw.  */
      if (rn >= 0)
	last = get_last_insn ();
    }
6457 /* If this is an expression of some kind and it has an associated line
6458 number, then emit the line number before expanding the expression.
     We need to save and restore the file and line information so that
     errors discovered during expansion are emitted with the right
     information.  It would be better if the diagnostic routines
     used the file/line information embedded in the tree nodes rather
     than globals.  */
6465 if (cfun && EXPR_HAS_LOCATION (exp))
6467 location_t saved_location = input_location;
6468 input_location = EXPR_LOCATION (exp);
6469 emit_line_note (input_location);
6471 /* Record where the insns produced belong. */
6472 record_block_change (TREE_BLOCK (exp));
6474 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6476 input_location = saved_location;
    }
  else
    {
      ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
    }
6483 /* If using non-call exceptions, mark all insns that may trap.
6484 expand_call() will mark CALL_INSNs before we get to this code,
6485 but it doesn't handle libcalls, and these may trap. */
  if (rn >= 0)
    {
      rtx insn;

      for (insn = next_real_insn (last); insn;
	   insn = next_real_insn (insn))
	{
6492 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
6493 /* If we want exceptions for non-call insns, any
6494 may_trap_p instruction may throw. */
6495 && GET_CODE (PATTERN (insn)) != CLOBBER
6496 && GET_CODE (PATTERN (insn)) != USE
6497 && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
	      REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
						  REG_NOTES (insn));
	}
    }

  return ret;
}
static rtx
expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
		    enum expand_modifier modifier, rtx *alt_rtl)
{
  rtx op0, op1, temp;
6513 tree type = TREE_TYPE (exp);
  int unsignedp;
  enum machine_mode mode;
6516 enum tree_code code = TREE_CODE (exp);
  optab this_optab;
  rtx subtarget, original_target;
  int ignore;
  tree context;
6521 bool reduce_bit_field = false;
#define REDUCE_BIT_FIELD(expr)	(reduce_bit_field && !ignore		  \
				 ? reduce_to_bit_field_precision ((expr), \
								  target, \
								  type)	  \
				 : (expr))
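/* Example: for a 3-bit unsigned bit-field type held in QImode,
   REDUCE_BIT_FIELD masks an expanded result back down to 3 bits, so
   that e.g. 7 + 1 wraps to 0 instead of leaking the value 8 present
   in the wider mode.  */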
6528 mode = TYPE_MODE (type);
6529 unsignedp = TYPE_UNSIGNED (type);
6530 if (lang_hooks.reduce_bit_field_operations
6531 && TREE_CODE (type) == INTEGER_TYPE
6532 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type))
6534 /* An operation in what may be a bit-field type needs the
6535 result to be reduced to the precision of the bit-field type,
6536 which is narrower than that of the type's mode. */
6537 reduce_bit_field = true;
  if (modifier == EXPAND_STACK_PARM)
    target = 0;
6542 /* Use subtarget as the target for operand 0 of a binary operation. */
6543 subtarget = get_subtarget (target);
6544 original_target = target;
6545 ignore = (target == const0_rtx
6546 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6547 || code == CONVERT_EXPR || code == COND_EXPR
6548 || code == VIEW_CONVERT_EXPR)
6549 && TREE_CODE (type) == VOID_TYPE));
6551 /* If we are going to ignore this result, we need only do something
6552 if there is a side-effect somewhere in the expression. If there
6553 is, short-circuit the most common cases here. Note that we must
6554 not call expand_expr with anything but const0_rtx in case this
6555 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
  if (ignore)
    {
      if (! TREE_SIDE_EFFECTS (exp))
	return const0_rtx;
6562 /* Ensure we reference a volatile object even if value is ignored, but
6563 don't do this if all we are doing is taking its address. */
6564 if (TREE_THIS_VOLATILE (exp)
6565 && TREE_CODE (exp) != FUNCTION_DECL
6566 && mode != VOIDmode && mode != BLKmode
6567 && modifier != EXPAND_CONST_ADDRESS)
6569 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
	  if (MEM_P (temp))
	    temp = copy_to_reg (temp);
	  return const0_rtx;
	}
6575 if (TREE_CODE_CLASS (code) == tcc_unary
6576 || code == COMPONENT_REF || code == INDIRECT_REF)
	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
			    modifier);
6580 else if (TREE_CODE_CLASS (code) == tcc_binary
6581 || TREE_CODE_CLASS (code) == tcc_comparison
6582 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6584 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
	  expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
	  return const0_rtx;
	}
6588 else if (code == BIT_FIELD_REF)
6590 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6591 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
	  expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
	  return const0_rtx;
	}

      target = 0;
    }
    case LABEL_DECL:
      {
	tree function = decl_function_context (exp);
6606 temp = label_rtx (exp);
6607 temp = gen_rtx_LABEL_REF (Pmode, temp);
	if (function != current_function_decl
	    && function != 0)
	  LABEL_REF_NONLOCAL_P (temp) = 1;
	temp = gen_rtx_MEM (FUNCTION_MODE, temp);
	return temp;
      }

    case SSA_NAME:
      return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
				 alt_rtl);
    case PARM_DECL:
    case VAR_DECL:
      /* If a static var's type was incomplete when the decl was written,
6624 but the type is complete now, lay out the decl now. */
6625 if (DECL_SIZE (exp) == 0
6626 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6627 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6628 layout_decl (exp, 0);
6630 /* ... fall through ... */
    case FUNCTION_DECL:
    case RESULT_DECL:
      gcc_assert (DECL_RTL (exp));
      /* Ensure the variable is marked as used even if it doesn't go through
	 a parser.  If it hasn't been used yet, write out an external
	 definition.  */
6639 if (! TREE_USED (exp))
6641 assemble_external (exp);
6642 TREE_USED (exp) = 1;
      /* Show we haven't gotten RTL for this yet.  */
      temp = 0;
6648 /* Variables inherited from containing functions should have
6649 been lowered by this point. */
6650 context = decl_function_context (exp);
6651 gcc_assert (!context
6652 || context == current_function_decl
6653 || TREE_STATIC (exp)
6654 /* ??? C++ creates functions that are not TREE_STATIC. */
6655 || TREE_CODE (exp) == FUNCTION_DECL);
6657 /* This is the case of an array whose size is to be determined
	 from its initializer, while the initializer is still being parsed.
	 See expand_decl.  */
6661 if (MEM_P (DECL_RTL (exp))
6662 && REG_P (XEXP (DECL_RTL (exp), 0)))
6663 temp = validize_mem (DECL_RTL (exp));
6665 /* If DECL_RTL is memory, we are in the normal case and either
6666 the address is not valid or it is not a register and -fforce-addr
6667 is specified, get the address into a register. */
6669 else if (MEM_P (DECL_RTL (exp))
6670 && modifier != EXPAND_CONST_ADDRESS
6671 && modifier != EXPAND_SUM
6672 && modifier != EXPAND_INITIALIZER
6673 && (! memory_address_p (DECL_MODE (exp),
6674 XEXP (DECL_RTL (exp), 0))
		   || (flag_force_addr
		       && !REG_P (XEXP (DECL_RTL (exp), 0)))))
	{
	  if (alt_rtl)
	    *alt_rtl = DECL_RTL (exp);
6680 temp = replace_equiv_address (DECL_RTL (exp),
6681 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6684 /* If we got something, return it. But first, set the alignment
6685 if the address is a register. */
      if (temp != 0)
	{
	  if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
	    mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));

	  return temp;
	}
6694 /* If the mode of DECL_RTL does not match that of the decl, it
6695 must be a promoted value. We return a SUBREG of the wanted mode,
6696 but mark it so that we know that it was already extended. */
6698 if (REG_P (DECL_RTL (exp))
6699 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6701 enum machine_mode pmode;
6703 /* Get the signedness used for this variable. Ensure we get the
6704 same mode we got when the variable was declared. */
6705 pmode = promote_mode (type, DECL_MODE (exp), &unsignedp,
6706 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0));
6707 gcc_assert (GET_MODE (DECL_RTL (exp)) == pmode);
6709 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6710 SUBREG_PROMOTED_VAR_P (temp) = 1;
6711 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6715 return DECL_RTL (exp);
    case INTEGER_CST:
      temp = immed_double_const (TREE_INT_CST_LOW (exp),
6719 TREE_INT_CST_HIGH (exp), mode);
6721 /* ??? If overflow is set, fold will have done an incomplete job,
6722 which can result in (plus xx (const_int 0)), which can get
6723 simplified by validate_replace_rtx during virtual register
6724 instantiation, which can result in unrecognizable insns.
6725 Avoid this by forcing all overflows into registers. */
6726 if (TREE_CONSTANT_OVERFLOW (exp)
6727 && modifier != EXPAND_INITIALIZER)
	temp = force_reg (mode, temp);
      return temp;

    case VECTOR_CST:
6733 if (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_INT
6734 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_FLOAT)
6735 return const_vector_from_tree (exp);
      return expand_expr (build_constructor_from_list
			  (TREE_TYPE (exp),
			   TREE_VECTOR_CST_ELTS (exp)),
6740 ignore ? const0_rtx : target, tmode, modifier);
    case CONST_DECL:
      return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
    case REAL_CST:
      /* If optimized, generate immediate CONST_DOUBLE
6747 which will be turned into memory by reload if necessary.
6749 We used to force a register so that loop.c could see it. But
6750 this does not allow gen_* patterns to perform optimizations with
6751 the constants. It also produces two insns in cases like "x = 1.0;".
6752 On most machines, floating-point constants are not permitted in
6753 many insns, so we'd end up copying it to a register in any case.
6755 Now, we do the copying in expand_binop, if appropriate. */
6756 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6757 TYPE_MODE (TREE_TYPE (exp)));
    case COMPLEX_CST:
      /* Handle evaluating a complex constant in a CONCAT target.  */
6761 if (original_target && GET_CODE (original_target) == CONCAT)
6763 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6766 rtarg = XEXP (original_target, 0);
6767 itarg = XEXP (original_target, 1);
6769 /* Move the real and imaginary parts separately. */
6770 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
6771 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
	  if (op0 != rtarg)
	    emit_move_insn (rtarg, op0);
	  if (op1 != itarg)
	    emit_move_insn (itarg, op1);
6778 return original_target;
6781 /* ... fall through ... */
    case STRING_CST:
      temp = output_constant_def (exp, 1);
6786 /* temp contains a constant address.
6787 On RISC machines where a constant address isn't valid,
6788 make some insns to get that address into a register. */
6789 if (modifier != EXPAND_CONST_ADDRESS
6790 && modifier != EXPAND_INITIALIZER
6791 && modifier != EXPAND_SUM
6792 && (! memory_address_p (mode, XEXP (temp, 0))
6793 || flag_force_addr))
6794 return replace_equiv_address (temp,
6795 copy_rtx (XEXP (temp, 0)));
6800 tree val = TREE_OPERAND (exp, 0);
6801 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
6803 if (!SAVE_EXPR_RESOLVED_P (exp))
6805 /* We can indeed still hit this case, typically via builtin
6806 expanders calling save_expr immediately before expanding
6807 something. Assume this means that we only have to deal
6808 with non-BLKmode values. */
6809 gcc_assert (GET_MODE (ret) != BLKmode);
6811 val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
6812 DECL_ARTIFICIAL (val) = 1;
6813 DECL_IGNORED_P (val) = 1;
6814 TREE_OPERAND (exp, 0) = val;
6815 SAVE_EXPR_RESOLVED_P (exp) = 1;
6817 if (!CONSTANT_P (ret))
6818 ret = copy_to_reg (ret);
6819 SET_DECL_RTL (val, ret);
    case GOTO_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6827 expand_goto (TREE_OPERAND (exp, 0));
      else
	expand_computed_goto (TREE_OPERAND (exp, 0));
      return const0_rtx;
    case CONSTRUCTOR:
      /* If we don't need the result, just ensure we evaluate any
	 subexpressions.  */
      if (ignore)
	{
6837 unsigned HOST_WIDE_INT idx;
6840 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6841 expand_expr (value, const0_rtx, VOIDmode, 0);
6846 /* All elts simple constants => refer to a constant in memory. But
6847 if this is a non-BLKmode mode, let it store a field at a time
6848 since that should make a CONST_INT or CONST_DOUBLE when we
6849 fold. Likewise, if we have a target we can use, it is best to
6850 store directly into the target unless the type is large enough
6851 that memcpy will be used. If we are making an initializer and
6852 all operands are constant, put it in memory as well.
6854 FIXME: Avoid trying to fill vector constructors piece-meal.
6855 Output them with output_constant_def below unless we're sure
6856 they're zeros. This should go away when vector initializers
6857 are treated like VECTOR_CST instead of arrays.
6859 else if ((TREE_STATIC (exp)
6860 && ((mode == BLKmode
6861 && ! (target != 0 && safe_from_p (target, exp, 1)))
6862 || TREE_ADDRESSABLE (exp)
6863 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6864 && (! MOVE_BY_PIECES_P
6865 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6867 && ! mostly_zeros_p (exp))))
6868 || ((modifier == EXPAND_INITIALIZER
6869 || modifier == EXPAND_CONST_ADDRESS)
6870 && TREE_CONSTANT (exp)))
6872 rtx constructor = output_constant_def (exp, 1);
6874 if (modifier != EXPAND_CONST_ADDRESS
6875 && modifier != EXPAND_INITIALIZER
6876 && modifier != EXPAND_SUM)
6877 constructor = validize_mem (constructor);
6883 /* Handle calls that pass values in multiple non-contiguous
6884 locations. The Irix 6 ABI has examples of this. */
6885 if (target == 0 || ! safe_from_p (target, exp, 1)
6886 || GET_CODE (target) == PARALLEL
6887 || modifier == EXPAND_STACK_PARM)
	target
	  = assign_temp (build_qualified_type (type,
					       (TYPE_QUALS (type)
						| (TREE_READONLY (exp)
						   * TYPE_QUAL_CONST))),
			 0, TREE_ADDRESSABLE (exp), 1);
      store_constructor (exp, target, 0, int_expr_size (exp));
      return target;
6899 case MISALIGNED_INDIRECT_REF:
6900 case ALIGN_INDIRECT_REF:
    case INDIRECT_REF:
      {
	tree exp1 = TREE_OPERAND (exp, 0);
6905 if (modifier != EXPAND_WRITE)
6909 t = fold_read_from_constant_string (exp);
6911 return expand_expr (t, target, tmode, modifier);
6914 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6915 op0 = memory_address (mode, op0);
6917 if (code == ALIGN_INDIRECT_REF)
6919 int align = TYPE_ALIGN_UNIT (type);
6920 op0 = gen_rtx_AND (Pmode, op0, GEN_INT (-align));
6921 op0 = memory_address (mode, op0);
6924 temp = gen_rtx_MEM (mode, op0);
6926 set_mem_attributes (temp, exp, 0);
6928 /* Resolve the misalignment now, so that we don't have to remember
6929 to resolve it later. Of course, this only works for reads. */
6930 /* ??? When we get around to supporting writes, we'll have to handle
6931 this in store_expr directly. The vectorizer isn't generating
6932 those yet, however. */
6933 if (code == MISALIGNED_INDIRECT_REF)
6938 gcc_assert (modifier == EXPAND_NORMAL);
6940 /* The vectorizer should have already checked the mode. */
6941 icode = movmisalign_optab->handlers[mode].insn_code;
6942 gcc_assert (icode != CODE_FOR_nothing);
6944 /* We've already validated the memory, and we're creating a
6945 new pseudo destination. The predicates really can't fail. */
6946 reg = gen_reg_rtx (mode);
6948 /* Nor can the insn generator. */
6949 insn = GEN_FCN (icode) (reg, temp);
6958 case TARGET_MEM_REF:
6960 struct mem_address addr;
6962 get_address_description (exp, &addr);
6963 op0 = addr_for_mem_ref (&addr, true);
6964 op0 = memory_address (mode, op0);
6965 temp = gen_rtx_MEM (mode, op0);
	set_mem_attributes (temp, TMR_ORIGINAL (exp), 0);
	return temp;
      }
    case ARRAY_REF:
      {
	tree array = TREE_OPERAND (exp, 0);
6974 tree index = TREE_OPERAND (exp, 1);
6976 /* Fold an expression like: "foo"[2].
6977 This is not done in fold so it won't happen inside &.
6978 Don't fold if this is for wide characters since it's too
6979 difficult to do correctly and this is a very rare case. */
6981 if (modifier != EXPAND_CONST_ADDRESS
6982 && modifier != EXPAND_INITIALIZER
6983 && modifier != EXPAND_MEMORY)
6985 tree t = fold_read_from_constant_string (exp);
6988 return expand_expr (t, target, tmode, modifier);
6991 /* If this is a constant index into a constant array,
6992 just get the value from the array. Handle both the cases when
6993 we have an explicit constructor and when our operand is a variable
6994 that was declared const. */
6996 if (modifier != EXPAND_CONST_ADDRESS
6997 && modifier != EXPAND_INITIALIZER
6998 && modifier != EXPAND_MEMORY
6999 && TREE_CODE (array) == CONSTRUCTOR
7000 && ! TREE_SIDE_EFFECTS (array)
7001 && TREE_CODE (index) == INTEGER_CST)
	    unsigned HOST_WIDE_INT ix;
	    tree field, value;

	    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
				      field, value)
	      if (tree_int_cst_equal (field, index))
		{
7010 if (!TREE_SIDE_EFFECTS (value))
7011 return expand_expr (fold (value), target, tmode, modifier);
7016 else if (optimize >= 1
7017 && modifier != EXPAND_CONST_ADDRESS
7018 && modifier != EXPAND_INITIALIZER
7019 && modifier != EXPAND_MEMORY
7020 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7021 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7022 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
7023 && targetm.binds_local_p (array))
7025 if (TREE_CODE (index) == INTEGER_CST)
7027 tree init = DECL_INITIAL (array);
7029 if (TREE_CODE (init) == CONSTRUCTOR)
		    unsigned HOST_WIDE_INT ix;
		    tree field, value;

		    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
					      field, value)
		      if (tree_int_cst_equal (field, index))
			{
7038 if (!TREE_SIDE_EFFECTS (value))
			    return expand_expr (fold (value), target, tmode,
						modifier);
			  break;
			}
7044 else if (TREE_CODE (init) == STRING_CST
7045 && 0 > compare_tree_int (index,
7046 TREE_STRING_LENGTH (init)))
7048 tree type = TREE_TYPE (TREE_TYPE (init));
7049 enum machine_mode mode = TYPE_MODE (type);
7051 if (GET_MODE_CLASS (mode) == MODE_INT
7052 && GET_MODE_SIZE (mode) == 1)
7053 return gen_int_mode (TREE_STRING_POINTER (init)
7054 [TREE_INT_CST_LOW (index)], mode);
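		/* For example, given "static const char s[] = "abc";",
		   the read "s[1]" is expanded here directly to
		   (const_int 98) with no memory reference.  */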
7059 goto normal_inner_ref;
7062 /* If the operand is a CONSTRUCTOR, we can just extract the
7063 appropriate field if it is present. */
7064 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
	  unsigned HOST_WIDE_INT idx;
	  tree field, value;

	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
				    idx, field, value)
	    if (field == TREE_OPERAND (exp, 1)
7072 /* We can normally use the value of the field in the
7073 CONSTRUCTOR. However, if this is a bitfield in
7074 an integral mode that we can fit in a HOST_WIDE_INT,
7075 we must mask only the number of bits in the bitfield,
7076 since this is done implicitly by the constructor. If
7077 the bitfield does not meet either of those conditions,
7078 we can't do this optimization. */
7079 && (! DECL_BIT_FIELD (field)
7080 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
7081 && (GET_MODE_BITSIZE (DECL_MODE (field))
7082 <= HOST_BITS_PER_WIDE_INT))))
7084 if (DECL_BIT_FIELD (field)
		    && modifier == EXPAND_STACK_PARM)
		  target = 0;
7087 op0 = expand_expr (value, target, tmode, modifier);
7088 if (DECL_BIT_FIELD (field))
7090 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
7091 enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
7093 if (TYPE_UNSIGNED (TREE_TYPE (field)))
7095 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7096 op0 = expand_and (imode, op0, op1, target);
		  else
		    {
		      tree count
			= build_int_cst (NULL_TREE,
					 GET_MODE_BITSIZE (imode) - bitsize);

		      op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
					  target, 0);
		      op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
					  target, 0);
		    }
7114 goto normal_inner_ref;
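      /* The shift pair in the fast path above is the classic
	 sign-extension idiom: for, say, a 5-bit signed field held in a
	 32-bit mode, shifting left and then (arithmetically) right by
	 32 - 5 = 27 bits replicates bit 4 into the high bits.
	 Illustrative widths; the actual counts come from
	 GET_MODE_BITSIZE and DECL_SIZE.  */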
7117 case ARRAY_RANGE_REF:
7120 enum machine_mode mode1;
7121 HOST_WIDE_INT bitsize, bitpos;
7124 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7125 &mode1, &unsignedp, &volatilep, true);
7128 /* If we got back the original object, something is wrong. Perhaps
7129 we are evaluating an expression too early. In any event, don't
7130 infinitely recurse. */
7131 gcc_assert (tem != exp);
	/* If TEM's type is a union of variable size, pass TARGET to the inner
	   computation, since it will need a temporary and TARGET is known
	   to be adequate.  This occurs in unchecked conversion in Ada.  */
7139 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7140 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7142 && modifier != EXPAND_STACK_PARM
7143 ? target : NULL_RTX),
7145 (modifier == EXPAND_INITIALIZER
7146 || modifier == EXPAND_CONST_ADDRESS
7147 || modifier == EXPAND_STACK_PARM)
7148 ? modifier : EXPAND_NORMAL);
7150 /* If this is a constant, put it into a register if it is a
7151 legitimate constant and OFFSET is 0 and memory if it isn't. */
7152 if (CONSTANT_P (op0))
7154 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7155 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
		&& offset == 0)
	      op0 = force_reg (mode, op0);
	    else
	      op0 = validize_mem (force_const_mem (mode, op0));
	/* Otherwise, if this object is not in memory and we either have an
7163 offset or a BLKmode result, put it there. This case can't occur in
7164 C, but can in Ada if we have unchecked conversion of an expression
7165 from a scalar type to an array or record type or for an
7166 ARRAY_RANGE_REF whose type is BLKmode. */
7167 else if (!MEM_P (op0)
		 && (offset != 0
		     || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7171 tree nt = build_qualified_type (TREE_TYPE (tem),
7172 (TYPE_QUALS (TREE_TYPE (tem))
7173 | TYPE_QUAL_CONST));
7174 rtx memloc = assign_temp (nt, 1, 1, 1);
	    emit_move_insn (memloc, op0);
	    op0 = memloc;
	  }
7182 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7185 gcc_assert (MEM_P (op0));
7187 #ifdef POINTERS_EXTEND_UNSIGNED
7188 if (GET_MODE (offset_rtx) != Pmode)
7189 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7191 if (GET_MODE (offset_rtx) != ptr_mode)
7192 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7195 if (GET_MODE (op0) == BLKmode
7196 /* A constant address in OP0 can have VOIDmode, we must
7197 not try to call force_reg in that case. */
7198 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7200 && (bitpos % bitsize) == 0
7201 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7202 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7204 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7208 op0 = offset_address (op0, offset_rtx,
7209 highest_pow2_factor (offset));
7212 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7213 record its alignment as BIGGEST_ALIGNMENT. */
7214 if (MEM_P (op0) && bitpos == 0 && offset != 0
7215 && is_aligning_offset (offset, tem))
7216 set_mem_align (op0, BIGGEST_ALIGNMENT);
7218 /* Don't forget about volatility even if this is a bitfield. */
7219 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
7221 if (op0 == orig_op0)
7222 op0 = copy_rtx (op0);
7224 MEM_VOLATILE_P (op0) = 1;
	/* The following code doesn't handle CONCAT.
	   Assume only bitpos == 0 can be used for CONCAT, due to
	   one-element arrays having the same mode as their element.  */
7230 if (GET_CODE (op0) == CONCAT)
	    gcc_assert (bitpos == 0
			&& bitsize == GET_MODE_BITSIZE (GET_MODE (op0)));
	    return op0;
	  }
7237 /* In cases where an aligned union has an unaligned object
7238 as a field, we might be extracting a BLKmode value from
7239 an integer-mode (e.g., SImode) object. Handle this case
7240 by doing the extract into an object as wide as the field
7241 (which we know to be the width of a basic mode), then
7242 storing into memory, and changing the mode to BLKmode. */
7243 if (mode1 == VOIDmode
7244 || REG_P (op0) || GET_CODE (op0) == SUBREG
7245 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7246 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7247 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7248 && modifier != EXPAND_CONST_ADDRESS
7249 && modifier != EXPAND_INITIALIZER)
7250 /* If the field isn't aligned enough to fetch as a memref,
7251 fetch it as a bit field. */
7252 || (mode1 != BLKmode
7253 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7254 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
7256 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7257 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
7258 && ((modifier == EXPAND_CONST_ADDRESS
7259 || modifier == EXPAND_INITIALIZER)
7261 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7262 || (bitpos % BITS_PER_UNIT != 0)))
7263 /* If the type and the field are a constant size and the
7264 size of the type isn't the same size as the bitfield,
7265 we must use bitfield operations. */
7267 && TYPE_SIZE (TREE_TYPE (exp))
7268 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
7269 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7272 enum machine_mode ext_mode = mode;
7274 if (ext_mode == BLKmode
7275 && ! (target != 0 && MEM_P (op0)
7277 && bitpos % BITS_PER_UNIT == 0))
7278 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7280 if (ext_mode == BLKmode)
7283 target = assign_temp (type, 0, 1, 1);
7288 /* In this case, BITPOS must start at a byte boundary and
7289 TARGET, if specified, must be a MEM. */
7290 gcc_assert (MEM_P (op0)
7291 && (!target || MEM_P (target))
7292 && !(bitpos % BITS_PER_UNIT));
7294 emit_block_move (target,
7295 adjust_address (op0, VOIDmode,
7296 bitpos / BITS_PER_UNIT),
				 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
					  / BITS_PER_UNIT),
				 (modifier == EXPAND_STACK_PARM
				  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));

		return target;
	      }
7305 op0 = validize_mem (op0);
7307 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
7308 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7310 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7311 (modifier == EXPAND_STACK_PARM
7312 ? NULL_RTX : target),
7313 ext_mode, ext_mode);
7315 /* If the result is a record type and BITSIZE is narrower than
7316 the mode of OP0, an integral mode, and this is a big endian
7317 machine, we must put the field into the high-order bits. */
7318 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7319 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7320 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
	    op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
				size_int (GET_MODE_BITSIZE (GET_MODE (op0))
					  - bitsize),
				op0, 1);
7326 /* If the result type is BLKmode, store the data into a temporary
7327 of the appropriate type, but with the mode corresponding to the
7328 mode for the data we have (op0's mode). It's tempting to make
7329 this a constant type, since we know it's only being stored once,
7330 but that can cause problems if we are taking the address of this
7331 COMPONENT_REF because the MEM of any reference via that address
7332 will have flags corresponding to the type, which will not
7333 necessarily be constant. */
7334 if (mode == BLKmode)
7337 = assign_stack_temp_for_type
7338 (ext_mode, GET_MODE_BITSIZE (ext_mode), 0, type);
7340 emit_move_insn (new, op0);
7341 op0 = copy_rtx (new);
7342 PUT_MODE (op0, BLKmode);
7343 set_mem_attributes (op0, exp, 1);
7349 /* If the result is BLKmode, use that to access the object
7351 if (mode == BLKmode)
7354 /* Get a reference to just this component. */
7355 if (modifier == EXPAND_CONST_ADDRESS
7356 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7357 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7359 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7361 if (op0 == orig_op0)
7362 op0 = copy_rtx (op0);
7364 set_mem_attributes (op0, exp, 0);
7365 if (REG_P (XEXP (op0, 0)))
7366 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7368 MEM_VOLATILE_P (op0) |= volatilep;
7369 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7370 || modifier == EXPAND_CONST_ADDRESS
7371 || modifier == EXPAND_INITIALIZER)
7373 else if (target == 0)
7374 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
	convert_move (target, op0, unsignedp);
	return target;
7381 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
7384 /* Check for a built-in function. */
7385 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
	      == FUNCTION_DECL)
	  && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7390 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7391 == BUILT_IN_FRONTEND)
	    return lang_hooks.expand_expr (exp, original_target,
					   tmode, modifier, alt_rtl);
	  else
	    return expand_builtin (exp, target, subtarget, tmode, ignore);
	}
7399 return expand_call (exp, target, ignore);
7401 case NON_LVALUE_EXPR:
      if (TREE_OPERAND (exp, 0) == error_mark_node)
	return const0_rtx;
7407 if (TREE_CODE (type) == UNION_TYPE)
7409 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
	  /* If both input and output are BLKmode, this conversion isn't doing
	     anything except possibly changing memory attributes.  */
7413 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
	      rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
					modifier);

	      result = copy_rtx (result);
	      set_mem_attributes (result, exp, 0);
	      return result;
	    }
	  if (target == 0)
	    {
	      if (TYPE_MODE (type) != BLKmode)
7426 target = gen_reg_rtx (TYPE_MODE (type));
	      else
		target = assign_temp (type, 0, 1, 1);
	    }
	  if (MEM_P (target))
	    /* Store data into beginning of memory target.  */
7433 store_expr (TREE_OPERAND (exp, 0),
7434 adjust_address (target, TYPE_MODE (valtype), 0),
7435 modifier == EXPAND_STACK_PARM);
	  else
	    {
	      gcc_assert (REG_P (target));
7441 /* Store this field into a union of the proper type. */
	      store_field (target,
			   MIN ((int_size_in_bytes (TREE_TYPE
						    (TREE_OPERAND (exp, 0)))
				 * BITS_PER_UNIT),
				(HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
			   0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
			   type, 0);
	    }
	  /* Return the entire union.  */
	  return target;
	}
7455 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7457 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7460 /* If the signedness of the conversion differs and OP0 is
7461 a promoted SUBREG, clear that indication since we now
7462 have to do the proper extension. */
7463 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7464 && GET_CODE (op0) == SUBREG)
7465 SUBREG_PROMOTED_VAR_P (op0) = 0;
7467 return REDUCE_BIT_FIELD (op0);
7470 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7471 if (GET_MODE (op0) == mode)
7474 /* If OP0 is a constant, just convert it into the proper mode. */
7475 else if (CONSTANT_P (op0))
7477 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7478 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7480 if (modifier == EXPAND_INITIALIZER)
7481 op0 = simplify_gen_subreg (mode, op0, inner_mode,
7482 subreg_lowpart_offset (mode,
	  else
	    op0 = convert_modes (mode, inner_mode, op0,
				 TYPE_UNSIGNED (inner_type));
7489 else if (modifier == EXPAND_INITIALIZER)
7490 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7492 else if (target == 0)
7493 op0 = convert_to_mode (mode, op0,
7494 TYPE_UNSIGNED (TREE_TYPE
7495 (TREE_OPERAND (exp, 0))));
7498 convert_move (target, op0,
7499 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7503 return REDUCE_BIT_FIELD (op0);
7505 case VIEW_CONVERT_EXPR:
7506 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7508 /* If the input and output modes are both the same, we are done.
7509 Otherwise, if neither mode is BLKmode and both are integral and within
7510 a word, we can use gen_lowpart. If neither is true, make sure the
7511 operand is in memory and convert the MEM to the new mode. */
7512 if (TYPE_MODE (type) == GET_MODE (op0))
7514 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7515 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7516 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
7517 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7518 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7519 op0 = gen_lowpart (TYPE_MODE (type), op0);
7520 else if (!MEM_P (op0))
	  /* If the operand is not a MEM, force it into memory.  Since we
	     are going to be changing the mode of the MEM, don't call
	     force_const_mem for constants because we don't allow pool
	     constants to change mode.  */
7526 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7528 gcc_assert (!TREE_ADDRESSABLE (exp));
	  if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
	    target
	      = assign_stack_temp_for_type
		(TYPE_MODE (inner_type),
		 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);

	  emit_move_insn (target, op0);
	  op0 = target;
	}
7540 /* At this point, OP0 is in the correct mode. If the output type is such
7541 that the operand is known to be aligned, indicate that it is.
	 Otherwise, we need only be concerned about alignment for non-BLKmode
	 results.  */
      if (MEM_P (op0))
	{
	  op0 = copy_rtx (op0);
7548 if (TYPE_ALIGN_OK (type))
7549 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7550 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7551 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7553 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7554 HOST_WIDE_INT temp_size
7555 = MAX (int_size_in_bytes (inner_type),
7556 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7557 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7558 temp_size, 0, type);
7559 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7561 gcc_assert (!TREE_ADDRESSABLE (exp));
7563 if (GET_MODE (op0) == BLKmode)
7564 emit_block_move (new_with_op0_mode, op0,
7565 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7566 (modifier == EXPAND_STACK_PARM
7567 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
	  else
	    emit_move_insn (new_with_op0_mode, op0);

	  op0 = new;
	}
      op0 = adjust_address (op0, TYPE_MODE (type), 0);

      return op0;
7580 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
7581 something else, make sure we add the register to the constant and
7582 then to the other thing. This case can occur during strength
7583 reduction and doing it this way will produce better code if the
7584 frame pointer or argument pointer is eliminated.
7586 fold-const.c will ensure that the constant is always in the inner
7587 PLUS_EXPR, so the only case we need to do anything about is if
7588 sp, ap, or fp is our second argument, in which case we must swap
7589 the innermost first argument and our second argument. */
7591 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7592 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7593 && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
7594 && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7595 || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7596 || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7598 tree t = TREE_OPERAND (exp, 1);
7600 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7601 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7604 /* If the result is to be ptr_mode and we are adding an integer to
7605 something, we might be forming a constant. So try to use
7606 plus_constant. If it produces a sum and we can't accept it,
7607 use force_operand. This allows P = &ARR[const] to generate
	 efficient code on machines where a SYMBOL_REF is not a valid
	 address.

	 If this is an EXPAND_SUM call, always return the sum.  */
7612 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7613 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
	  if (modifier == EXPAND_STACK_PARM)
	    target = 0;
7617 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7618 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	      && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
	    {
	      rtx constant_part;

7623 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7625 /* Use immed_double_const to ensure that the constant is
7626 truncated according to the mode of OP1, then sign extended
7627 to a HOST_WIDE_INT. Using the constant directly can result
7628 in non-canonical RTL in a 64x32 cross compile. */
	      constant_part
		= immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
				      (HOST_WIDE_INT) 0,
				      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7633 op1 = plus_constant (op1, INTVAL (constant_part));
7634 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7635 op1 = force_operand (op1, target);
7636 return REDUCE_BIT_FIELD (op1);
	  else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
		   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
		   && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
	    {
	      rtx constant_part;

7645 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7646 (modifier == EXPAND_INITIALIZER
7647 ? EXPAND_INITIALIZER : EXPAND_SUM));
	      if (! CONSTANT_P (op0))
		{
7650 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7651 VOIDmode, modifier);
7652 /* Return a PLUS if modifier says it's OK. */
7653 if (modifier == EXPAND_SUM
7654 || modifier == EXPAND_INITIALIZER)
		    return simplify_gen_binary (PLUS, mode, op0, op1);
		  goto binop2;
		}
7658 /* Use immed_double_const to ensure that the constant is
7659 truncated according to the mode of OP1, then sign extended
7660 to a HOST_WIDE_INT. Using the constant directly can result
7661 in non-canonical RTL in a 64x32 cross compile. */
	      constant_part
		= immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
				      (HOST_WIDE_INT) 0,
				      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7666 op0 = plus_constant (op0, INTVAL (constant_part));
7667 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7668 op0 = force_operand (op0, target);
7669 return REDUCE_BIT_FIELD (op0);
7673 /* No sense saving up arithmetic to be done
7674 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
	  || mode != ptr_mode)
	{
7680 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7681 subtarget, &op0, &op1, 0);
	  if (op0 == const0_rtx)
	    return op1;
	  if (op1 == const0_rtx)
	    return op0;
	  goto binop2;
	}
7689 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7690 subtarget, &op0, &op1, modifier);
7691 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7694 /* For initializers, we are allowed to return a MINUS of two
7695 symbolic constants. Here we handle all cases when both operands
7697 /* Handle difference of two symbolic constants,
7698 for the sake of an initializer. */
7699 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7700 && really_constant_p (TREE_OPERAND (exp, 0))
7701 && really_constant_p (TREE_OPERAND (exp, 1)))
7703 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7704 NULL_RTX, &op0, &op1, modifier);
7706 /* If the last operand is a CONST_INT, use plus_constant of
7707 the negated constant. Else make the MINUS. */
7708 if (GET_CODE (op1) == CONST_INT)
7709 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
7711 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
7714 /* No sense saving up arithmetic to be done
7715 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
	  || mode != ptr_mode)
	goto binop;
7722 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7723 subtarget, &op0, &op1, modifier);
7725 /* Convert A - const to A + (-const). */
      if (GET_CODE (op1) == CONST_INT)
	{
	  op1 = negate_rtx (mode, op1);
	  return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
	}

      goto binop2;
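      /* e.g. "x - 4" is canonicalized here to (plus x (const_int -4)),
	 the form simplifiers and addressing modes expect.  */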
7735 /* If first operand is constant, swap them.
7736 Thus the following special case checks need only
7737 check the second operand. */
7738 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7740 tree t1 = TREE_OPERAND (exp, 0);
7741 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7742 TREE_OPERAND (exp, 1) = t1;
7745 /* Attempt to return something suitable for generating an
7746 indexed address, for machines that support that. */
7748 if (modifier == EXPAND_SUM && mode == ptr_mode
7749 && host_integerp (TREE_OPERAND (exp, 1), 0))
7751 tree exp1 = TREE_OPERAND (exp, 1);
	  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
			     EXPAND_SUM);

	  if (!REG_P (op0))
	    op0 = force_operand (op0, NULL_RTX);
	  if (!REG_P (op0))
	    op0 = copy_to_mode_reg (mode, op0);
7761 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
7762 gen_int_mode (tree_low_cst (exp1, 0),
7763 TYPE_MODE (TREE_TYPE (exp1)))));
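      /* This is what lets an indexed reference like "p[i]" with 4-byte
	 elements come back under EXPAND_SUM as
	 (mult (reg i) (const_int 4)), ready to be folded into an
	 indexed address by the caller.  Illustrative shape only.  */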
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
7769 /* Check for multiplying things that have been extended
7770 from a narrower type. If this machine supports multiplying
7771 in that narrower type with a result in the desired type,
7772 do it that way, and avoid the explicit type-conversion. */
7773 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7774 && TREE_CODE (type) == INTEGER_TYPE
7775 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7776 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7777 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7778 && int_fits_type_p (TREE_OPERAND (exp, 1),
7779 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7780 /* Don't use a widening multiply if a shift will do. */
7781 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7782 > HOST_BITS_PER_WIDE_INT)
7783 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
	      || (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7786 && (TYPE_PRECISION (TREE_TYPE
7787 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
		      == TYPE_PRECISION (TREE_TYPE
					 (TREE_OPERAND
					  (TREE_OPERAND (exp, 0), 0))))
7791 /* If both operands are extended, they must either both
7792 be zero-extended or both be sign-extended. */
7793 && (TYPE_UNSIGNED (TREE_TYPE
7794 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
		      == TYPE_UNSIGNED (TREE_TYPE
					(TREE_OPERAND
					 (TREE_OPERAND (exp, 0), 0)))))))
7799 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
7800 enum machine_mode innermode = TYPE_MODE (op0type);
7801 bool zextend_p = TYPE_UNSIGNED (op0type);
7802 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
7803 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
7805 if (mode == GET_MODE_2XWIDER_MODE (innermode))
7807 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7809 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7810 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7811 TREE_OPERAND (exp, 1),
7812 NULL_RTX, &op0, &op1, 0);
		  else
		    expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
				     NULL_RTX, &op0, &op1, 0);
		  goto binop3;
		}
	      else if (other_optab->handlers[(int) mode].insn_code
		       != CODE_FOR_nothing
		       && innermode == word_mode)
		{
		  rtx htem, hipart;

		  op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7824 NULL_RTX, VOIDmode, 0);
7825 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7826 op1 = convert_modes (innermode, mode,
7827 expand_expr (TREE_OPERAND (exp, 1),
						      NULL_RTX, VOIDmode, 0),
					 unsignedp);
		  else
		    op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7832 NULL_RTX, VOIDmode, 0);
7833 temp = expand_binop (mode, other_optab, op0, op1, target,
7834 unsignedp, OPTAB_LIB_WIDEN);
7835 hipart = gen_highpart (innermode, temp);
		  htem = expand_mult_highpart_adjust (innermode, hipart,
						      op0, op1, hipart,
						      zextend_p);
		  if (htem != hipart)
		    emit_move_insn (hipart, htem);
7841 return REDUCE_BIT_FIELD (temp);
7845 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7846 subtarget, &op0, &op1, 0);
7847 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
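      /* Example of the widening path above: with 32-bit int operands,
	 "(long long) a * (long long) b" can map to a single DImode
	 smul_widen pattern when the target provides one, instead of two
	 explicit extensions followed by a full 64x64 multiply.  Whether
	 it does depends on the target's optab table.  */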
7849 case TRUNC_DIV_EXPR:
7850 case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case ROUND_DIV_EXPR:
7853 case EXACT_DIV_EXPR:
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
7856 /* Possible optimization: compute the dividend with EXPAND_SUM
7857 then if the divisor is constant can optimize the case
7858 where some terms of the dividend have coeffs divisible by it. */
7859 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7860 subtarget, &op0, &op1, 0);
7861 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7866 case TRUNC_MOD_EXPR:
7867 case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
7872 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7873 subtarget, &op0, &op1, 0);
7874 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7876 case FIX_ROUND_EXPR:
7877 case FIX_FLOOR_EXPR:
    case FIX_CEIL_EXPR:
      gcc_unreachable ();			/* Not used for C.  */
7881 case FIX_TRUNC_EXPR:
7882 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7883 if (target == 0 || modifier == EXPAND_STACK_PARM)
7884 target = gen_reg_rtx (mode);
      expand_fix (target, op0, unsignedp);
      return target;

    case FLOAT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7890 if (target == 0 || modifier == EXPAND_STACK_PARM)
7891 target = gen_reg_rtx (mode);
7892 /* expand_float can't figure out what to do if FROM has VOIDmode.
7893 So give it the correct mode. With -O, cse will optimize this. */
7894 if (GET_MODE (op0) == VOIDmode)
	op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
				op0);
      expand_float (target, op0,
		    TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;
    case NEGATE_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      temp = expand_unop (mode,
			  optab_for_tree_code (NEGATE_EXPR, type),
			  op0, target, 0);
      gcc_assert (temp);
      return REDUCE_BIT_FIELD (temp);
    case ABS_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
7916 /* ABS_EXPR is not valid for complex arguments. */
7917 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7918 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
7920 /* Unsigned abs is simply the operand. Testing here means we don't
7921 risk generating incorrect code below. */
      if (TYPE_UNSIGNED (type))
	return op0;
7925 return expand_abs (mode, op0, target, unsignedp,
7926 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
    case MAX_EXPR:
    case MIN_EXPR:
      target = original_target;
      if (target == 0
	  || modifier == EXPAND_STACK_PARM
	  || (MEM_P (target) && MEM_VOLATILE_P (target))
	  || GET_MODE (target) != mode
	  || (REG_P (target)
	      && REGNO (target) < FIRST_PSEUDO_REGISTER))
7937 target = gen_reg_rtx (mode);
7938 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7939 target, &op0, &op1, 0);
7941 /* First try to do it with a special MIN or MAX instruction.
	 If that does not win, use a conditional jump to select the proper
	 value.  */
      this_optab = optab_for_tree_code (code, type);
      temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
			   OPTAB_WIDEN);
      if (temp != 0)
	return temp;
      /* At this point, a MEM target is no longer useful; we will get better
	 code without it.  */
7953 if (! REG_P (target))
7954 target = gen_reg_rtx (mode);
7956 /* If op1 was placed in target, swap op0 and op1. */
      if (target != op0 && target == op1)
	{
	  temp = op0;
	  op0 = op1;
	  op1 = temp;
	}
7964 /* We generate better code and avoid problems with op1 mentioning
7965 target by forcing op1 into a pseudo if it isn't a constant. */
7966 if (! CONSTANT_P (op1))
7967 op1 = force_reg (mode, op1);
7969 #ifdef HAVE_conditional_move
7970 /* Use a conditional move if possible. */
7971 if (can_conditionally_move_p (mode))
7973 enum rtx_code comparison_code;
7976 if (code == MAX_EXPR)
7977 comparison_code = unsignedp ? GEU : GE;
	  else
	    comparison_code = unsignedp ? LEU : LE;
7981 /* ??? Same problem as in expmed.c: emit_conditional_move
7982 forces a stack adjustment via compare_from_rtx, and we
7983 lose the stack adjustment if the sequence we are about
7984 to create is discarded. */
	  do_pending_stack_adjust ();

	  start_sequence ();
7989 /* Try to emit the conditional move. */
	  insn = emit_conditional_move (target, comparison_code,
					op0, op1, mode,
					op0, op1, mode,
					unsignedp);
	  /* If we could do the conditional move, emit the sequence,
	     and return.  */
	  if (insn)
	    {
	      rtx seq = get_insns ();
	      end_sequence ();
	      emit_insn (seq);
	      return target;
	    }

	  /* Otherwise discard the sequence and fall back to code with
	     branches.  */
	  end_sequence ();
	}
#endif
      if (target != op0)
	emit_move_insn (target, op0);
8013 temp = gen_label_rtx ();
8015 /* If this mode is an integer too wide to compare properly,
8016 compare word by word. Rely on cse to optimize constant cases. */
8017 if (GET_MODE_CLASS (mode) == MODE_INT
8018 && ! can_compare_p (GE, mode, ccp_jump))
8020 if (code == MAX_EXPR)
	    do_jump_by_parts_greater_rtx (mode, unsignedp, target, op1,
					  NULL_RTX, temp);
	  else
	    do_jump_by_parts_greater_rtx (mode, unsignedp, op1, target,
					  NULL_RTX, temp);
	}
      else
8029 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
8030 unsignedp, mode, NULL_RTX, NULL_RTX, temp);
      emit_move_insn (target, op1);
      emit_label (temp);
      return target;
    case BIT_NOT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
      gcc_assert (temp);
      return temp;
8044 /* ??? Can optimize bitwise operations with one arg constant.
8045 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8046 and (a bitwise1 b) bitwise2 b (etc)
8047 but that is probably not worth while. */
8049 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8050 boolean values when we want in all cases to compute both of them. In
8051 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8052 as actual zero-or-1 values and then bitwise anding. In cases where
8053 there cannot be any side effects, better code would be made by
8054 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8055 how to recognize those cases. */
8057 case TRUTH_AND_EXPR:
      code = BIT_AND_EXPR;
    case BIT_AND_EXPR:
      goto binop;

    case TRUTH_OR_EXPR:
      code = BIT_IOR_EXPR;
    case BIT_IOR_EXPR:
      goto binop;

    case TRUTH_XOR_EXPR:
      code = BIT_XOR_EXPR;
    case BIT_XOR_EXPR:
      goto binop;
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
8080 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
			   unsignedp);
8084 /* Could determine the answer when only additive constants differ. Also,
8085 the addition of one can be handled by changing the condition. */
    case LT_EXPR:  case LE_EXPR:  case GT_EXPR:  case GE_EXPR:
    case EQ_EXPR:  case NE_EXPR:
    case UNORDERED_EXPR:  case ORDERED_EXPR:
    case UNLT_EXPR:  case UNLE_EXPR:  case UNGT_EXPR:  case UNGE_EXPR:
    case UNEQ_EXPR:  case LTGT_EXPR:
8100 temp = do_store_flag (exp,
8101 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
			    tmode != VOIDmode ? tmode : mode, 0);
      if (temp != 0)
	return temp;
8106 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8107 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8109 && REG_P (original_target)
8110 && (GET_MODE (original_target)
8111 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8113 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8116 /* If temp is constant, we can just compute the result. */
8117 if (GET_CODE (temp) == CONST_INT)
8119 if (INTVAL (temp) != 0)
8120 emit_move_insn (target, const1_rtx);
8122 emit_move_insn (target, const0_rtx);
8127 if (temp != original_target)
8129 enum machine_mode mode1 = GET_MODE (temp);
8130 if (mode1 == VOIDmode)
8131 mode1 = tmode != VOIDmode ? tmode : mode;
8133 temp = copy_to_mode_reg (mode1, temp);
8136 op1 = gen_label_rtx ();
8137 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8138 GET_MODE (temp), unsignedp, op1);
8139 emit_move_insn (temp, const1_rtx);
8144 /* If no set-flag instruction, must generate a conditional store
8145 into a temporary variable.  Drop through and handle this like && and ||.  */
8150 || modifier == EXPAND_STACK_PARM
8151 || ! safe_from_p (target, exp, 1)
8152 /* Make sure we don't have a hard reg (such as function's return
8153 value) live across basic blocks, if not optimizing. */
8154 || (!optimize && REG_P (target)
8155 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8156 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8159 emit_move_insn (target, const0_rtx);
8161 op1 = gen_label_rtx ();
8162 jumpifnot (exp, op1);
8165 emit_move_insn (target, const1_rtx);
8168 return ignore ? const0_rtx : target;
8170 case TRUTH_NOT_EXPR:
8171 if (modifier == EXPAND_STACK_PARM)
8173 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8174 /* The parser is careful to generate TRUTH_NOT_EXPR
8175 only with operands that are always zero or one. */
8176 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8177 target, 1, OPTAB_LIB_WIDEN);
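/* For instance (illustrative): because the operand is known to be 0 or 1,
   "!x" is computed here simply as "x ^ 1"; no comparison is needed.  */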
8181 case STATEMENT_LIST:
8183 tree_stmt_iterator iter;
8185 gcc_assert (ignore);
8187 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
8188 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
8193 /* A COND_EXPR with its type being VOID_TYPE represents a
8194 conditional jump and is handled in
8195 expand_gimple_cond_expr. */
8196 gcc_assert (!VOID_TYPE_P (TREE_TYPE (exp)));
8198 /* Note that COND_EXPRs whose type is a structure or union
8199 are required to be constructed to contain assignments to
8200 a temporary variable, so that we can evaluate them here
8201 for side effect only. If type is void, we must do likewise. */
8203 gcc_assert (!TREE_ADDRESSABLE (type)
8205 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node
8206 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node);
8208 /* If we are not to produce a result, we have no target. Otherwise,
8209 if a target was specified use it; it will not be used as an
8210 intermediate target unless it is safe.  If no target, use a temporary and return it.  */
8213 if (modifier != EXPAND_STACK_PARM
8215 && safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8216 && GET_MODE (original_target) == mode
8217 #ifdef HAVE_conditional_move
8218 && (! can_conditionally_move_p (mode)
8219 || REG_P (original_target))
8221 && !MEM_P (original_target))
8222 temp = original_target;
8224 temp = assign_temp (type, 0, 0, 1);
8226 do_pending_stack_adjust ();
8228 op0 = gen_label_rtx ();
8229 op1 = gen_label_rtx ();
8230 jumpifnot (TREE_OPERAND (exp, 0), op0);
8231 store_expr (TREE_OPERAND (exp, 1), temp,
8232 modifier == EXPAND_STACK_PARM);
8234 emit_jump_insn (gen_jump (op1));
8237 store_expr (TREE_OPERAND (exp, 2), temp,
8238 modifier == EXPAND_STACK_PARM);
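/* Schematically (an illustrative sketch), the code emitted for
   "cond ? then-val : else-val" is

	if (! cond) goto op0;
	temp = then-val;
	goto op1;
     op0:
	temp = else-val;
     op1:
  */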
8245 target = expand_vec_cond_expr (exp, target);
8250 tree lhs = TREE_OPERAND (exp, 0);
8251 tree rhs = TREE_OPERAND (exp, 1);
8253 gcc_assert (ignore);
8255 /* Check for |= or &= of a bitfield of size one into another bitfield
8256 of size 1. In this case, (unless we need the result of the
8257 assignment) we can do this more efficiently with a
8258 test followed by an assignment, if necessary.
8260 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8261 things change so we do, this code should be enhanced to support it.  */
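/* An illustrative source-level case (s, t, a and b are hypothetical):
   for one-bit bitfields "s.a |= t.b;" this becomes a test and a
   conditional store,

	if (t.b) s.a = 1;

   and for BIT_AND_EXPR correspondingly "if (! t.b) s.a = 0;".  */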
8263 if (TREE_CODE (lhs) == COMPONENT_REF
8264 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8265 || TREE_CODE (rhs) == BIT_AND_EXPR)
8266 && TREE_OPERAND (rhs, 0) == lhs
8267 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8268 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8269 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8271 rtx label = gen_label_rtx ();
8273 do_jump (TREE_OPERAND (rhs, 1),
8274 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8275 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8276 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8277 (TREE_CODE (rhs) == BIT_IOR_EXPR
8279 : integer_zero_node)));
8280 do_pending_stack_adjust ();
8285 expand_assignment (lhs, rhs);
8291 if (!TREE_OPERAND (exp, 0))
8292 expand_null_return ();
8294 expand_return (TREE_OPERAND (exp, 0));
8298 return expand_expr_addr_expr (exp, target, tmode, modifier);
8301 /* Expand the real and imaginary operands to rtx.  */
8302 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8303 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8306 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8308 /* Move the real (op0) and imaginary (op1) parts to their location. */
8309 write_complex_part (target, op0, false);
8310 write_complex_part (target, op1, true);
8315 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8316 return read_complex_part (op0, false);
8319 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8320 return read_complex_part (op0, true);
8323 expand_resx_expr (exp);
8326 case TRY_CATCH_EXPR:
8328 case EH_FILTER_EXPR:
8329 case TRY_FINALLY_EXPR:
8330 /* Lowered by tree-eh.c. */
8333 case WITH_CLEANUP_EXPR:
8334 case CLEANUP_POINT_EXPR:
8336 case CASE_LABEL_EXPR:
8342 case PREINCREMENT_EXPR:
8343 case PREDECREMENT_EXPR:
8344 case POSTINCREMENT_EXPR:
8345 case POSTDECREMENT_EXPR:
8348 case TRUTH_ANDIF_EXPR:
8349 case TRUTH_ORIF_EXPR:
8350 /* Lowered by gimplify.c. */
8354 return get_exception_pointer (cfun);
8357 return get_exception_filter (cfun);
8360 /* Function descriptors are not valid except as
8361 initialization constants, and should not be expanded. */
8369 expand_label (TREE_OPERAND (exp, 0));
8373 expand_asm_expr (exp);
8376 case WITH_SIZE_EXPR:
8377 /* WITH_SIZE_EXPR expands to its first argument. The caller should
8378 have pulled out the size to use in whatever context it needed. */
8379 return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode,
8382 case REALIGN_LOAD_EXPR:
8384 tree oprnd0 = TREE_OPERAND (exp, 0);
8385 tree oprnd1 = TREE_OPERAND (exp, 1);
8386 tree oprnd2 = TREE_OPERAND (exp, 2);
8389 this_optab = optab_for_tree_code (code, type);
8390 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
8391 op2 = expand_expr (oprnd2, NULL_RTX, VOIDmode, 0);
8392 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8398 case REDUC_MAX_EXPR:
8399 case REDUC_MIN_EXPR:
8400 case REDUC_PLUS_EXPR:
8402 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8403 this_optab = optab_for_tree_code (code, type);
8404 temp = expand_unop (mode, this_optab, op0, target, unsignedp);
8409 case VEC_LSHIFT_EXPR:
8410 case VEC_RSHIFT_EXPR:
8412 target = expand_vec_shift_expr (exp, target);
8417 return lang_hooks.expand_expr (exp, original_target, tmode,
8421 /* Here to do an ordinary binary operator. */
8423 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8424 subtarget, &op0, &op1, 0);
8426 this_optab = optab_for_tree_code (code, type);
8428 if (modifier == EXPAND_STACK_PARM)
8430 temp = expand_binop (mode, this_optab, op0, op1, target,
8431 unsignedp, OPTAB_LIB_WIDEN);
8433 return REDUCE_BIT_FIELD (temp);
8435 #undef REDUCE_BIT_FIELD
8437 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
8438 signedness of TYPE), possibly returning the result in TARGET. */
8440 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
8442 HOST_WIDE_INT prec = TYPE_PRECISION (type);
8443 if (target && GET_MODE (target) != GET_MODE (exp))
8445 if (TYPE_UNSIGNED (type))
8448 if (prec < HOST_BITS_PER_WIDE_INT)
8449 mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
8452 mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
8453 ((unsigned HOST_WIDE_INT) 1
8454 << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
8456 return expand_and (GET_MODE (exp), exp, mask, target);
8460 tree count = build_int_cst (NULL_TREE,
8461 GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
8462 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
8463 return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
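/* A worked example (illustrative): reducing a 32-bit SImode value to a
   signed 8-bit precision shifts left by 24 and then arithmetic-shifts
   right by 24, duplicating bit 7 into bits 8..31; the unsigned case
   above instead masks with (1 << 8) - 1 == 0xff.  */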
8467 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
8468 when applied to the address of EXP produces an address known to be
8469 aligned to more than BIGGEST_ALIGNMENT.  */
8472 is_aligning_offset (tree offset, tree exp)
8474 /* Strip off any conversions. */
8475 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8476 || TREE_CODE (offset) == NOP_EXPR
8477 || TREE_CODE (offset) == CONVERT_EXPR)
8478 offset = TREE_OPERAND (offset, 0);
8480 /* We must now have a BIT_AND_EXPR with a constant that is one less than
8481 a power of 2 and larger than BIGGEST_ALIGNMENT in bytes.  */
8482 if (TREE_CODE (offset) != BIT_AND_EXPR
8483 || !host_integerp (TREE_OPERAND (offset, 1), 1)
8484 || compare_tree_int (TREE_OPERAND (offset, 1),
8485 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
8486 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
8489 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
8490 It must be NEGATE_EXPR. Then strip any more conversions. */
8491 offset = TREE_OPERAND (offset, 0);
8492 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8493 || TREE_CODE (offset) == NOP_EXPR
8494 || TREE_CODE (offset) == CONVERT_EXPR)
8495 offset = TREE_OPERAND (offset, 0);
8497 if (TREE_CODE (offset) != NEGATE_EXPR)
8500 offset = TREE_OPERAND (offset, 0);
8501 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8502 || TREE_CODE (offset) == NOP_EXPR
8503 || TREE_CODE (offset) == CONVERT_EXPR)
8504 offset = TREE_OPERAND (offset, 0);
8506 /* This must now be the address of EXP. */
8507 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
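/* The pattern recognized above typically arises from source such as
   (an illustrative sketch; buf and ALIGN are hypothetical, ALIGN being
   a power of 2 larger than BIGGEST_ALIGNMENT in bytes):

	buf + ((- (intptr_t) buf) & (ALIGN - 1))

   in which case the resulting address is known to be ALIGN-aligned.  */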
8510 /* Return the tree node if ARG corresponds to a string constant, or zero
8511 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
8512 in bytes within the string that ARG is accessing. The type of the
8513 offset will be `sizetype'. */
8516 string_constant (tree arg, tree *ptr_offset)
8521 if (TREE_CODE (arg) == ADDR_EXPR)
8523 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8525 *ptr_offset = size_zero_node;
8526 return TREE_OPERAND (arg, 0);
8528 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
8530 array = TREE_OPERAND (arg, 0);
8531 offset = size_zero_node;
8533 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
8535 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
8536 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
8537 if (TREE_CODE (array) != STRING_CST
8538 && TREE_CODE (array) != VAR_DECL)
8544 else if (TREE_CODE (arg) == PLUS_EXPR)
8546 tree arg0 = TREE_OPERAND (arg, 0);
8547 tree arg1 = TREE_OPERAND (arg, 1);
8552 if (TREE_CODE (arg0) == ADDR_EXPR
8553 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
8554 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
8556 array = TREE_OPERAND (arg0, 0);
8559 else if (TREE_CODE (arg1) == ADDR_EXPR
8560 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
8561 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
8563 array = TREE_OPERAND (arg1, 0);
8572 if (TREE_CODE (array) == STRING_CST)
8574 *ptr_offset = convert (sizetype, offset);
8577 else if (TREE_CODE (array) == VAR_DECL)
8581 /* Variables initialized to string literals can be handled too. */
8582 if (DECL_INITIAL (array) == NULL_TREE
8583 || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
8586 /* The array must be read-only, non-volatile, and must bind locally.  */
8587 if (! TREE_READONLY (array)
8588 || TREE_SIDE_EFFECTS (array)
8589 || ! targetm.binds_local_p (array))
8592 /* Avoid const char foo[4] = "abcde"; */
8593 if (DECL_SIZE_UNIT (array) == NULL_TREE
8594 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
8595 || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
8596 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
8599 /* If the variable is bigger than the string literal, OFFSET must be
8600 constant and within the bounds of the string literal.  */
8601 offset = convert (sizetype, offset);
8602 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
8603 && (! host_integerp (offset, 1)
8604 || compare_tree_int (offset, length) >= 0))
8607 *ptr_offset = offset;
8608 return DECL_INITIAL (array);
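/* Usage sketch (illustrative): for the argument "hello" + 2 this returns
   the STRING_CST "hello" with *PTR_OFFSET set to 2.  Likewise, given

	static const char msg[] = "hi";

   the argument msg + 1 returns the initializer with *PTR_OFFSET 1.  */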
8614 /* Generate code to calculate EXP using a store-flag instruction
8615 and return an rtx for the result. EXP is either a comparison
8616 or a TRUTH_NOT_EXPR whose operand is a comparison.
8618 If TARGET is nonzero, store the result there if convenient.
8620 If ONLY_CHEAP is nonzero, only do this if it is likely to be very cheap.
8623 Return zero if there is no suitable set-flag instruction
8624 available on this machine.
8626 Once expand_expr has been called on the arguments of the comparison,
8627 we are committed to doing the store flag, since it is not safe to
8628 re-evaluate the expression. We emit the store-flag insn by calling
8629 emit_store_flag, but only expand the arguments if we have a reason
8630 to believe that emit_store_flag will be successful. If we think that
8631 it will, but it isn't, we have to simulate the store-flag with a
8632 set/jump/set sequence. */
8635 do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
8638 tree arg0, arg1, type;
8640 enum machine_mode operand_mode;
8644 enum insn_code icode;
8645 rtx subtarget = target;
8648 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
8649 result at the end. We can't simply invert the test since it would
8650 have already been inverted if it were valid. This case occurs for
8651 some floating-point comparisons. */
8653 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
8654 invert = 1, exp = TREE_OPERAND (exp, 0);
8656 arg0 = TREE_OPERAND (exp, 0);
8657 arg1 = TREE_OPERAND (exp, 1);
8659 /* Don't crash if the comparison was erroneous. */
8660 if (arg0 == error_mark_node || arg1 == error_mark_node)
8663 type = TREE_TYPE (arg0);
8664 operand_mode = TYPE_MODE (type);
8665 unsignedp = TYPE_UNSIGNED (type);
8667 /* We won't bother with BLKmode store-flag operations because it would mean
8668 passing a lot of information to emit_store_flag. */
8669 if (operand_mode == BLKmode)
8672 /* We won't bother with store-flag operations involving function pointers
8673 when function pointers must be canonicalized before comparisons. */
8674 #ifdef HAVE_canonicalize_funcptr_for_compare
8675 if (HAVE_canonicalize_funcptr_for_compare
8676 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
8677 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8679 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
8680 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8681 == FUNCTION_TYPE))))
8688 /* Get the rtx comparison code to use. We know that EXP is a comparison
8689 operation of some type. Some comparisons against 1 and -1 can be
8690 converted to comparisons with zero. Do so here so that the tests
8691 below will be aware that we have a comparison with zero. These
8692 tests will not catch constants in the first operand, but constants
8693 are rarely passed as the first operand. */
8695 switch (TREE_CODE (exp))
8704 if (integer_onep (arg1))
8705 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
8707 code = unsignedp ? LTU : LT;
8710 if (! unsignedp && integer_all_onesp (arg1))
8711 arg1 = integer_zero_node, code = LT;
8713 code = unsignedp ? LEU : LE;
8716 if (! unsignedp && integer_all_onesp (arg1))
8717 arg1 = integer_zero_node, code = GE;
8719 code = unsignedp ? GTU : GT;
8722 if (integer_onep (arg1))
8723 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
8725 code = unsignedp ? GEU : GE;
8728 case UNORDERED_EXPR:
8757 /* Put a constant second. */
8758 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
8760 tem = arg0; arg0 = arg1; arg1 = tem;
8761 code = swap_condition (code);
8764 /* If this is an equality or inequality test of a single bit, we can
8765 do this by shifting the bit being tested to the low-order bit and
8766 masking the result with the constant 1. If the condition was EQ,
8767 we xor it with 1. This does not require an scc insn and is faster
8768 than an scc insn even if we have it.
8770 The code to make this transformation was moved into fold_single_bit_test,
8771 so we just call into the folder and expand its result. */
8773 if ((code == NE || code == EQ)
8774 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
8775 && integer_pow2p (TREE_OPERAND (arg0, 1)))
8777 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
8778 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
8780 target, VOIDmode, EXPAND_NORMAL);
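/* For instance (illustrative): "(x & 8) != 0" is expanded as
   "(x >> 3) & 1", and "(x & 8) == 0" as "((x >> 3) & 1) ^ 1", avoiding
   a store-flag instruction entirely.  */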
8783 /* Now see if we are likely to be able to do this. Return if not. */
8784 if (! can_compare_p (code, operand_mode, ccp_store_flag))
8787 icode = setcc_gen_code[(int) code];
8788 if (icode == CODE_FOR_nothing
8789 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
8791 /* We can only do this if it is one of the special cases that
8792 can be handled without an scc insn. */
8793 if ((code == LT && integer_zerop (arg1))
8794 || (! only_cheap && code == GE && integer_zerop (arg1)))
8796 else if (! only_cheap && (code == NE || code == EQ)
8797 && TREE_CODE (type) != REAL_TYPE
8798 && ((abs_optab->handlers[(int) operand_mode].insn_code
8799 != CODE_FOR_nothing)
8800 || (ffs_optab->handlers[(int) operand_mode].insn_code
8801 != CODE_FOR_nothing)))
8807 if (! get_subtarget (target)
8808 || GET_MODE (subtarget) != operand_mode)
8811 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
8814 target = gen_reg_rtx (mode);
8816 result = emit_store_flag (target, code, op0, op1,
8817 operand_mode, unsignedp, 1);
8822 result = expand_binop (mode, xor_optab, result, const1_rtx,
8823 result, 0, OPTAB_LIB_WIDEN);
8827 /* If this failed, we have to do this with set/compare/jump/set code. */
8829 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
8830 target = gen_reg_rtx (GET_MODE (target));
8832 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
8833 result = compare_from_rtx (op0, op1, code, unsignedp,
8834 operand_mode, NULL_RTX);
8835 if (GET_CODE (result) == CONST_INT)
8836 return (((result == const0_rtx && ! invert)
8837 || (result != const0_rtx && invert))
8838 ? const0_rtx : const1_rtx);
8840 /* The code of RESULT may not match CODE if compare_from_rtx
8841 decided to swap its operands and reverse the original code.
8843 We know that compare_from_rtx returns either a CONST_INT or
8844 a new comparison code, so it is safe to just extract the
8845 code from RESULT. */
8846 code = GET_CODE (result);
8848 label = gen_label_rtx ();
8849 gcc_assert (bcc_gen_fctn[(int) code]);
8851 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
8852 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
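/* Schematically, the fallback just emitted is (illustrative; the two
   constants are swapped when INVERT):

	target = 1;
	if (op0 CODE op1) goto L;
	target = 0;
     L:
  */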
8859 /* Stubs in case we haven't got a casesi insn. */
8861 # define HAVE_casesi 0
8862 # define gen_casesi(a, b, c, d, e) (0)
8863 # define CODE_FOR_casesi CODE_FOR_nothing
8866 /* If the machine does not have a case insn that compares the bounds,
8867 this means extra overhead for dispatch tables, which raises the
8868 threshold for using them. */
8869 #ifndef CASE_VALUES_THRESHOLD
8870 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
8871 #endif /* CASE_VALUES_THRESHOLD */
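/* An illustrative consequence: with HAVE_casesi, a switch such as

	switch (x) { case 1: ...; case 2: ...; case 3: ...; }

   falls below the threshold of 4 and is expanded as a compare-and-branch
   sequence rather than a dispatch table.  */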
8874 case_values_threshold (void)
8876 return CASE_VALUES_THRESHOLD;
8879 /* Attempt to generate a casesi instruction. Returns 1 if successful,
8880 0 otherwise (i.e. if there is no casesi instruction). */
8882 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
8883 rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
8885 enum machine_mode index_mode = SImode;
8886 int index_bits = GET_MODE_BITSIZE (index_mode);
8887 rtx op1, op2, index;
8888 enum machine_mode op_mode;
8893 /* Convert the index to SImode. */
8894 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
8896 enum machine_mode omode = TYPE_MODE (index_type);
8897 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
8899 /* We must handle the endpoints in the original mode. */
8900 index_expr = build2 (MINUS_EXPR, index_type,
8901 index_expr, minval);
8902 minval = integer_zero_node;
8903 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
8904 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
8905 omode, 1, default_label);
8906 /* Now we can safely truncate. */
8907 index = convert_to_mode (index_mode, index, 0);
8911 if (TYPE_MODE (index_type) != index_mode)
8913 index_expr = convert (lang_hooks.types.type_for_size
8914 (index_bits, 0), index_expr);
8915 index_type = TREE_TYPE (index_expr);
8918 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
8921 do_pending_stack_adjust ();
8923 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
8924 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
8926 index = copy_to_mode_reg (op_mode, index);
8928 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
8930 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
8931 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
8932 op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
8933 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
8935 op1 = copy_to_mode_reg (op_mode, op1);
8937 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
8939 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
8940 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
8941 op2, TYPE_UNSIGNED (TREE_TYPE (range)));
8942 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
8944 op2 = copy_to_mode_reg (op_mode, op2);
8946 emit_jump_insn (gen_casesi (index, op1, op2,
8947 table_label, default_label));
8951 /* Attempt to generate a tablejump instruction; same concept. */
8952 #ifndef HAVE_tablejump
8953 #define HAVE_tablejump 0
8954 #define gen_tablejump(x, y) (0)
8957 /* Subroutine of the next function.
8959 INDEX is the value being switched on, with the lowest value
8960 in the table already subtracted.
8961 MODE is its expected mode (needed if INDEX is constant).
8962 RANGE is the length of the jump table.
8963 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
8965 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
8966 index value is out of range. */
8969 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
8974 if (INTVAL (range) > cfun->max_jumptable_ents)
8975 cfun->max_jumptable_ents = INTVAL (range);
8977 /* Do an unsigned comparison (in the proper mode) between the index
8978 expression and the value which represents the length of the range.
8979 Since we just finished subtracting the lower bound of the range
8980 from the index expression, this comparison allows us to simultaneously
8981 check that the original index expression value is both greater than
8982 or equal to the minimum value of the range and less than or equal to
8983 the maximum value of the range. */
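/* For example (illustrative): for case values 3..7 the caller has already
   computed index - 3, so the single unsigned test
   "(unsigned) (index - 3) > 4" rejects both index < 3 and index > 7.  */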
8985 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
8988 /* If index is in range, it must fit in Pmode.
8989 Convert to Pmode so we can index with it. */
8991 index = convert_to_mode (Pmode, index, 1);
8993 /* Don't let a MEM slip through, because then INDEX that comes
8994 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
8995 and break_out_memory_refs will go to work on it and mess it up. */
8996 #ifdef PIC_CASE_VECTOR_ADDRESS
8997 if (flag_pic && !REG_P (index))
8998 index = copy_to_mode_reg (Pmode, index);
9001 /* If flag_force_addr were to affect this address
9002 it could interfere with the tricky assumptions made
9003 about addresses that contain label-refs,
9004 which may be valid only very near the tablejump itself. */
9005 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9006 GET_MODE_SIZE, because this indicates how large insns are. The other
9007 uses should all be Pmode, because they are addresses. This code
9008 could fail if addresses and insns are not the same size. */
9009 index = gen_rtx_PLUS (Pmode,
9010 gen_rtx_MULT (Pmode, index,
9011 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9012 gen_rtx_LABEL_REF (Pmode, table_label));
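/* Schematically (illustrative), the dispatch address computed above is

	table_label + index * GET_MODE_SIZE (CASE_VECTOR_MODE)

   i.e. a scaled index into the jump table itself.  */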
9013 #ifdef PIC_CASE_VECTOR_ADDRESS
9015 index = PIC_CASE_VECTOR_ADDRESS (index);
9018 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9019 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9020 vector = gen_const_mem (CASE_VECTOR_MODE, index);
9021 convert_move (temp, vector, 0);
9023 emit_jump_insn (gen_tablejump (temp, table_label));
9025 /* If we are generating PIC code or if the table is PC-relative, the
9026 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9027 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
9032 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
9033 rtx table_label, rtx default_label)
9037 if (! HAVE_tablejump)
9040 index_expr = fold_build2 (MINUS_EXPR, index_type,
9041 convert (index_type, index_expr),
9042 convert (index_type, minval));
9043 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9044 do_pending_stack_adjust ();
9046 do_tablejump (index, TYPE_MODE (index_type),
9047 convert_modes (TYPE_MODE (index_type),
9048 TYPE_MODE (TREE_TYPE (range)),
9049 expand_expr (range, NULL_RTX,
9051 TYPE_UNSIGNED (TREE_TYPE (range))),
9052 table_label, default_label);
9056 /* Nonzero if the mode is a valid vector mode for this architecture.
9057 This returns nonzero even if there is no hardware support for the
9058 vector mode, but we can emulate with narrower modes. */
9061 vector_mode_valid_p (enum machine_mode mode)
9063 enum mode_class class = GET_MODE_CLASS (mode);
9064 enum machine_mode innermode;
9066 /* Doh! What's going on? */
9067 if (class != MODE_VECTOR_INT
9068 && class != MODE_VECTOR_FLOAT)
9071 /* Hardware support. Woo hoo! */
9072 if (targetm.vector_mode_supported_p (mode))
9075 innermode = GET_MODE_INNER (mode);
9077 /* We should probably return 1 if requesting V4DI and we have no DI,
9078 but do have V2DI; that case, however, is probably very unlikely.  */
9080 /* If we have support for the inner mode, we can safely emulate it.
9081 We may not have V2DI, but we can emulate with a pair of DIs.  */
9082 return targetm.scalar_mode_supported_p (innermode);
9085 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
9087 const_vector_from_tree (tree exp)
9092 enum machine_mode inner, mode;
9094 mode = TYPE_MODE (TREE_TYPE (exp));
9096 if (initializer_zerop (exp))
9097 return CONST0_RTX (mode);
9099 units = GET_MODE_NUNITS (mode);
9100 inner = GET_MODE_INNER (mode);
9102 v = rtvec_alloc (units);
9104 link = TREE_VECTOR_CST_ELTS (exp);
9105 for (i = 0; link; link = TREE_CHAIN (link), ++i)
9107 elt = TREE_VALUE (link);
9109 if (TREE_CODE (elt) == REAL_CST)
9110 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
9113 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
9114 TREE_INT_CST_HIGH (elt),
9118 /* Initialize remaining elements to 0. */
9119 for (; i < units; ++i)
9120 RTVEC_ELT (v, i) = CONST0_RTX (inner);
9122 return gen_rtx_CONST_VECTOR (mode, v);
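/* Usage sketch (illustrative): a V4SImode VECTOR_CST listing only the
   elements {1, 2} yields the CONST_VECTOR {1, 2, 0, 0}.  */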
9124 #include "gt-expr.h"