/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "typeclass.h"
#include "langhooks.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "tree-flow.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
						     unsigned int, unsigned int);
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
			      struct move_by_pieces *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned);
static rtx emit_block_move_via_libcall (rtx, rtx, rtx, bool);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
			       struct store_by_pieces *);
static rtx clear_storage_via_libcall (rtx, rtx, bool);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
				     HOST_WIDE_INT, enum machine_mode,
				     tree, tree, int, int);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);
static int is_aligning_offset (tree, tree);
static void expand_operands (tree, tree, rtx, rtx*, rtx*,
			     enum expand_modifier);
static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
static rtx do_store_flag (tree, rtx, enum machine_mode, int);
static void emit_single_push_insn (enum machine_mode, rtx, tree);
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
static rtx const_vector_from_tree (tree);
static void write_complex_part (rtx, rtx, bool);
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
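/* For instance, float_extend_from_mem[DFmode][SFmode] ends up true when
   the target's SFmode-to-DFmode extend insn accepts a MEM source operand;
   init_expr_once below computes this by probing the insn's operand
   predicates.  (An illustrative reading of the table, not an exhaustive
   one.)  */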
/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO)
#endif
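/* A worked example, under assumed target values (purely illustrative):
   with MOVE_MAX_PIECES == 8 and MOVE_RATIO == 3, a 16-byte copy with
   64-bit alignment needs two 8-byte moves, so MOVE_BY_PIECES_P (16, 64)
   evaluates (2 < 3), i.e. true, and the copy is expanded as inline moves
   rather than through the block-move machinery.  */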
/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) CLEAR_RATIO)
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero, or
   to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO)
#endif
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movmem_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block sets.  */
enum insn_code setmem_optab[NUM_MACHINE_MODES];

/* These arrays record the insn_code of two different kinds of insns
   to perform block compares.  */
enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
enum insn_code cmpmem_optab[NUM_MACHINE_MODES];

/* Synchronization primitives.  */
enum insn_code sync_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_compare_and_swap[NUM_MACHINE_MODES];
enum insn_code sync_compare_and_swap_cc[NUM_MACHINE_MODES];
enum insn_code sync_lock_test_and_set[NUM_MACHINE_MODES];
enum insn_code sync_lock_release[NUM_MACHINE_MODES];
/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
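/* With that default, every unaligned access on a strict-alignment target
   counts as slow, so the by-pieces loops below fall back to narrower,
   naturally aligned modes; on non-strict targets unaligned wide accesses
   are assumed cheap.  Ports can override this per mode and alignment.  */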
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

init_expr_once (void)
  enum machine_mode mode;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */
      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	    if (! HARD_REGNO_MODE_OK (regno, mode))

	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
	   srcmode = GET_MODE_WIDER_MODE (srcmode))
	  ic = can_extend_p (mode, srcmode, 0);
	  if (ic == CODE_FOR_nothing)

	  PUT_MODE (mem, srcmode);

	  if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
	    float_extend_from_mem[mode][srcmode] = true;
/* This is run at the start of compiling a function.  */

  cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */
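/* A minimal usage sketch (hypothetical pseudos; the modes are only
   examples):

     rtx src = gen_reg_rtx (SImode);
     rtx dst = gen_reg_rtx (DImode);
     convert_move (dst, src, 1);

   emits whatever zero-extending insn (or libcall) the target provides
   for SImode to DImode.  */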
convert_move (rtx to, rtx from, int unsignedp)
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
			      : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));

  gcc_assert (to_real == from_real);

  /* If the source and destination are already the same, then there's
     nothing to do.  */

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
      emit_move_insn (to, from);

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
      gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));

      if (VECTOR_MODE_P (to_mode))
	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);

      gcc_assert (GET_MODE_PRECISION (from_mode)
		  != GET_MODE_PRECISION (to_mode));

      if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))

      /* Try converting directly if the insn is supported.  */

      code = tab->handlers[to_mode][from_mode].insn_code;
      if (code != CODE_FOR_nothing)
	  emit_unop_insn (code, to, from,
			  tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);

      /* Otherwise use a libcall.  */
      libcall = tab->handlers[to_mode][from_mode].libfunc;

      /* Is this conversion implemented yet?  */
      gcc_assert (libcall);

      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
      insns = get_insns ();

      emit_libcall_block (insns, to, value,
			  tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
								       from)
			  : gen_rtx_FLOAT_EXTEND (to_mode, from));
  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);

      gcc_assert (trunc_optab->handlers[to_mode][full_mode].insn_code
		  != CODE_FOR_nothing);

      if (full_mode != from_mode)
	from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,

  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);

      gcc_assert (sext_optab->handlers[full_mode][from_mode].insn_code
		  != CODE_FOR_nothing);

      if (to_mode == full_mode)
	  emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,

      new_from = gen_reg_rtx (full_mode);
      emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
		      new_from, from, UNKNOWN);

      /* else proceed to integer conversions below.  */
      from_mode = full_mode;
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);

      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	  if (reg_overlap_mentioned_p (to, from))
	    from = force_reg (from_mode, from);
	  emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);

      /* No special multiword conversion insn; do it by hand.  */

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;

	      && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
	      && STORE_FLAG_VALUE == -1)
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));

		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
	      fill_value = convert_to_mode (word_mode, fill_value, 1);

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  gcc_assert (subword);

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);

      insns = get_insns ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
	   && ! MEM_VOLATILE_P (from)
	   && direct_load[(int) to_mode]
	   && ! mode_dependent_address_p (XEXP (from, 0)))
	  || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
	   && ! MEM_VOLATILE_P (from)
	   && direct_load[(int) to_mode]
	   && ! mode_dependent_address_p (XEXP (from, 0)))
	  || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	    from = force_not_mem (from);

	  emit_unop_insn (code, to, from, equiv_code);

	  enum machine_mode intermediate;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_cst (NULL_TREE,
					GET_MODE_BITSIZE (to_mode)
					- GET_MODE_BITSIZE (from_mode));
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
	  emit_move_insn (to, tmp);

  /* Support special truncate insns for certain modes.  */
  if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
      emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);

  /* Mode combination is not recognized.  */
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.  */

convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
  return convert_modes (mode, VOIDmode, x, unsignedp);
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */
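/* E.g. (a sketch, with illustrative modes):

     rtx wide = convert_modes (DImode, SImode, x, 1);

   returns a DImode rtx holding X zero-extended from SImode, reusing X
   in place when a lowpart reference suffices and otherwise emitting a
   conversion into a fresh pseudo.  convert_to_mode above is the common
   wrapper used when X already carries a nonvoid mode of its own.  */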
convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      && (! HARD_REGISTER_P (x)
			  || HARD_REGNO_MODE_OK (REGNO (x), mode))
		      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						GET_MODE_BITSIZE (GET_MODE (x)))))))))
      /* ??? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return gen_int_mode (val, mode);

      return gen_lowpart (mode, x);

  /* Converting from integer constant into mode is always equivalent to an
     subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
      gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
      return simplify_gen_subreg (mode, x, oldmode, 0);

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
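/* Worked example: on a host with 64-bit HOST_WIDE_INT the constant cap
   is 2 * 8 = 16 bytes, so a target whose MOVE_MAX_PIECES is 8 gets
   STORE_MAX_PIECES == 8, while a hypothetical target with 32-byte moves
   would still be capped at 16.  */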
/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

can_move_by_pieces (unsigned HOST_WIDE_INT len,
		    unsigned int align ATTRIBUTE_UNUSED)
  return MOVE_BY_PIECES_P (len, align);

/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
		unsigned int align, int endp)
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.from_addr = from_addr;
      to_addr = XEXP (to, 0);
      data.autinc_to
	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
#ifdef STACK_GROWS_DOWNWARD

  data.to_addr = to_addr;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align, max_size) > 2)
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;

      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;

      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.explicit_inc_to = -1;

      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.explicit_inc_to = 1;

      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);

  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
      enum machine_mode xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
	   tmode != VOIDmode;
	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
	    || SLOW_UNALIGNED_ACCESS (tmode, align))

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);

  /* The code above should have handled everything.  */
  gcc_assert (!data.len);

      gcc_assert (!data.reverse);

	  if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
	    emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
	  else
	    data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
	  to1 = adjust_automodify_address (data.to, QImode, data.to_addr,

	  to1 = adjust_address (data.to, QImode, data.offset);
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */
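/* E.g. (assuming a target whose widest piece is 4 bytes and which is
   happy with 32-bit alignment): L == 13 and ALIGN == 32 gives
   13 = 3*4 + 1, i.e. three SImode moves plus one QImode move, so the
   function returns 4.  Purely illustrative numbers.  */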
static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
		       unsigned int max_size)
  unsigned HOST_WIDE_INT n_insns = 0;
  enum machine_mode tmode;

  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
      enum machine_mode tmode, xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
	   tmode != VOIDmode;
	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
	    || SLOW_UNALIGNED_ACCESS (tmode, align))

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));

  while (max_size > 1)
      enum machine_mode mode = VOIDmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
		  struct move_by_pieces *data)
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
      if (data->reverse)
	data->offset -= size;

	  if (data->autinc_to)
	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
	  else
	    to1 = adjust_address (data->to, mode, data->offset);

      if (data->autinc_from)
	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
      else
	from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));

	emit_insn ((*genfun) (to1, from1));
#ifdef PUSH_ROUNDING
	emit_single_push_insn (mode, from1, NULL);

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */
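/* Typical call (a sketch; X and Y are BLKmode MEMs built by the caller,
   and the byte count is a compile-time constant here):

     retval = emit_block_move (x, y, GEN_INT (nbytes), BLOCK_OP_NORMAL);

   The function then tries move_by_pieces, a movmem pattern, a memcpy
   libcall, or an explicit byte loop, in that order of preference.  */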
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)

    case BLOCK_OP_NORMAL:
    case BLOCK_OP_TAILCALL:
      may_use_call = true;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
	 to force it to pop the arguments right away.  */

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;

  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  gcc_assert (MEM_P (x));
  gcc_assert (MEM_P (y));

  /* Make sure we've got BLKmode addresses; store_one_arg can decide that
     block copy is more efficient for other large modes, e.g. DCmode.  */
  x = adjust_address (x, BLKmode, 0);
  y = adjust_address (y, BLKmode, 0);

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (GET_CODE (size) == CONST_INT)
      if (INTVAL (size) == 0)

      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, size);
      set_mem_size (y, size);

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movmem (x, y, size, align))
    ;
  else if (may_use_call)
    retval = emit_block_move_via_libcall (x, y, size,
					  method == BLOCK_OP_TAILCALL);
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
  /* If arguments are pushed on the stack, then they're safe.  */

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
    tree fn = emit_block_move_libcall_fn (false);
    if (REG_PARM_STACK_SPACE (fn) != 0)

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
    CUMULATIVE_ARGS args_so_far;

    fn = emit_block_move_libcall_fn (false);
    INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);

    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
	enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
	rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
	if (!tmp || !REG_P (tmp))
	if (targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL, 1))
	FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
/* A subroutine of emit_block_move.  Expand a movmem pattern;
   return true if successful.  */

static bool
emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align)
  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  int save_volatile_ok = volatile_ok;
  enum machine_mode mode;

  /* Since this is a move insn, we don't care about volatility.  */

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
      enum insn_code code = movmem_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
	     here because if SIZE is less than the mode mask, as it is
	     returned by the macro, it will definitely be less than the
	     actual mode mask.  */
	  && ((GET_CODE (size) == CONST_INT
	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
		   <= (GET_MODE_MASK (mode) >> 1)))
	      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
	      || (*pred) (x, BLKmode))
	  && ((pred = insn_data[(int) code].operand[1].predicate) == 0
	      || (*pred) (y, BLKmode))
	  && ((pred = insn_data[(int) code].operand[3].predicate) == 0
	      || (*pred) (opalign, VOIDmode)))
	  rtx last = get_last_insn ();

	  op2 = convert_to_mode (mode, size, 1);
	  pred = insn_data[(int) code].operand[2].predicate;
	  if (pred != 0 && ! (*pred) (op2, mode))
	    op2 = copy_to_mode_reg (mode, op2);

	  /* ??? When called via emit_block_move_for_call, it'd be
	     nice if there were some way to inform the backend, so
	     that it doesn't fail the expansion because it thinks
	     emitting the libcall would be more efficient.  */

	  pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	      volatile_ok = save_volatile_ok;

	    delete_insns_since (last);

  volatile_ok = save_volatile_ok;
/* A subroutine of emit_block_move.  Expand a call to memcpy.
   Return the return value from memcpy, 0 otherwise.  */

static rtx
emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
  rtx dst_addr, src_addr;
  tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;

  /* Emit code to copy the addresses of DST and SRC and SIZE into new
     pseudos.  We can then place those new pseudos into a VAR_DECL and
     use them later.  */

  dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));

  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  size_mode = TYPE_MODE (sizetype);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  size_tree = make_tree (sizetype, size);

  fn = emit_block_move_libcall_fn (true);
  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
  arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
  arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);

  /* Now we have to build up the CALL_EXPR itself.  */
  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
  call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
		      call_expr, arg_list, NULL_TREE);
  CALL_EXPR_TAILCALL (call_expr) = tailcall;

  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
static GTY(()) tree block_move_fn;

init_block_move_fn (const char *asmspec)
      fn = get_identifier ("memcpy");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
				       const_ptr_type_node, sizetype,

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;

    set_user_assembler_name (block_move_fn, asmspec);

static tree
emit_block_move_libcall_fn (int for_call)
  static bool emitted_extern;

    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
      emitted_extern = true;
      make_decl_rtl (block_move_fn);
      assemble_external (block_move_fn);

  return block_move_fn;
/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */
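/* The emitted loop is equivalent to this sketch (byte at a time,
   matching the QImode accesses in the code below):

     iter = 0;
     goto cmp;
   top:
     *(x_base + iter) = *(y_base + iter);
     iter = iter + 1;
   cmp:
     if (iter < size) goto top;
*/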
static void
emit_block_move_via_loop (rtx x, rtx y, rtx size,
			  unsigned int align ATTRIBUTE_UNUSED)
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (Pmode, iter_mode, iter, true);
  x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
  y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
			     true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
#ifdef HAVE_load_multiple

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
	delete_insns_since (last);

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

move_block_from_reg (int regno, rtx x, int nregs)

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
      rtx last = get_last_insn ();
      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
	delete_insns_since (last);

  for (i = 0; i < nregs; i++)
      rtx tem = operand_subword (x, i, 1, BLKmode);

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */
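/* For reference, such a group is a PARALLEL of (reg, byte-offset)
   pairs, along the lines of (illustrative modes and numbers):

     (parallel [(expr_list (reg:DI 3) (const_int 0))
		(expr_list (reg:DI 4) (const_int 8))])

   gen_group_rtx returns a copy with each hard reg replaced by a fresh
   pseudo of the same mode.  */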
gen_group_rtx (rtx orig)
  gcc_assert (GET_CODE (orig) == PARALLEL);

  length = XVECLEN (orig, 0);
  tmps = alloca (sizeof (rtx) * length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  for (; i < length; i++)
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
/* A subroutine of emit_group_load.  Arguments as for emit_group_load,
   except that values are placed in TMPS[i], and must later be moved
   into corresponding XEXP (XVECEXP (DST, 0, i), 0) element.  */

static void
emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
  enum machine_mode m = GET_MODE (orig_src);

  gcc_assert (GET_CODE (dst) == PARALLEL);

      && !SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_src)
      && GET_CODE (orig_src) != CONCAT)
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
      if (imode == BLKmode)
	src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
      else
	src = gen_reg_rtx (imode);
      if (imode != BLKmode)
	src = gen_lowpart (GET_MODE (orig_src), src);
      emit_move_insn (src, orig_src);
      /* ...and back again.  */
      if (imode != BLKmode)
	src = gen_lowpart (imode, src);
      emit_group_load_1 (tmps, dst, src, type, ssize);

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	  /* Arrange to shift the fragment to where it belongs.
	     extract_bit_field loads to the lsb of the reg.  */
#ifdef BLOCK_REG_PADDING
	      BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
	      == (BYTES_BIG_ENDIAN ? upward : downward)
	    shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	  bytelen = ssize - bytepos;
	  gcc_assert (bytelen > 0);

      /* If we won't be loading directly from memory, protect the real source
	 from strange tricks we might play; but make sure that the source can
	 be loaded directly into the destination.  */
      if (!MEM_P (orig_src)
	  && (!CONSTANT_P (orig_src)
	      || (GET_MODE (orig_src) != mode
		  && GET_MODE (orig_src) != VOIDmode)))
	  if (GET_MODE (orig_src) == VOIDmode)
	    src = gen_reg_rtx (mode);
	  else
	    src = gen_reg_rtx (GET_MODE (orig_src));

	  emit_move_insn (src, orig_src);

      /* Optimize the access just a bit.  */
      if (MEM_P (src)
	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
	      || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	  tmps[i] = gen_reg_rtx (mode);
	  emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
      else if (COMPLEX_MODE_P (mode)
	       && GET_MODE (src) == mode
	       && bytelen == GET_MODE_SIZE (mode))
	/* Let emit_move_complex do the bulk of the work.  */
	tmps[i] = src;
      else if (GET_CODE (src) == CONCAT)
	  unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
	  unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));

	  if ((bytepos == 0 && bytelen == slen0)
	      || (bytepos != 0 && bytepos + bytelen <= slen))
	      /* The following assumes that the concatenated objects all
		 have the same size.  In this case, a simple calculation
		 can be used to determine the object and the bit field
		 to be extracted.  */
	      tmps[i] = XEXP (src, bytepos / slen0);
	      if (! CONSTANT_P (tmps[i])
		  && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
		tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
					     (bytepos % slen0) * BITS_PER_UNIT,
					     1, NULL_RTX, mode, mode);

	      gcc_assert (!bytepos);
	      mem = assign_stack_temp (GET_MODE (src), slen, 0);
	      emit_move_insn (mem, src);
	      tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
					   0, 1, NULL_RTX, mode, mode);

      /* FIXME: A SIMD parallel will eventually lead to a subreg of a
	 SIMD register, which is currently broken.  While we get GCC
	 to emit proper RTL for these cases, let's dump to memory.  */
      else if (VECTOR_MODE_P (GET_MODE (dst))
	  int slen = GET_MODE_SIZE (GET_MODE (src));

	  mem = assign_stack_temp (GET_MODE (src), slen, 0);
	  emit_move_insn (mem, src);
	  tmps[i] = adjust_address (mem, mode, (int) bytepos);
      else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
	       && XVECLEN (dst, 0) > 1)
	tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
      else if (CONSTANT_P (src)
	       || (REG_P (src) && GET_MODE (src) == mode))
	tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
				     bytepos * BITS_PER_UNIT, 1, NULL_RTX,

      if (shift)
	tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
				build_int_cst (NULL_TREE, shift), tmps[i], 0);
/* Emit code to move a block SRC of type TYPE to a block DST,
   where DST is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_SRC in bytes, or -1
   if not known.  */

emit_group_load (rtx dst, rtx src, tree type, int ssize)
  tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
  emit_group_load_1 (tmps, dst, src, type, ssize);

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = 0; i < XVECLEN (dst, 0); i++)
      rtx d = XEXP (XVECEXP (dst, 0, i), 0);

      emit_move_insn (d, tmps[i]);

/* Similar, but load SRC into new pseudos in a format that looks like
   PARALLEL.  This can later be fed to emit_group_move to get things
   in the right place.  */

emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
  vec = rtvec_alloc (XVECLEN (parallel, 0));
  emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);

  /* Convert the vector to look just like the original PARALLEL, except
     with the computed values.  */
  for (i = 0; i < XVECLEN (parallel, 0); i++)
      rtx e = XVECEXP (parallel, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
	  d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
	  e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
      RTVEC_ELT (vec, i) = e;

  return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
/* Emit code to move a block SRC to block DST, where SRC and DST are
   non-consecutive groups of registers, each represented by a PARALLEL.  */

emit_group_move (rtx dst, rtx src)
  gcc_assert (GET_CODE (src) == PARALLEL
	      && GET_CODE (dst) == PARALLEL
	      && XVECLEN (src, 0) == XVECLEN (dst, 0));

  /* Skip first entry if NULL.  */
  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
		    XEXP (XVECEXP (src, 0, i), 0));

/* Move a group of registers represented by a PARALLEL into pseudos.  */

emit_group_move_into_temps (rtx src)
  rtvec vec = rtvec_alloc (XVECLEN (src, 0));

  for (i = 0; i < XVECLEN (src, 0); i++)
      rtx e = XVECEXP (src, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
	e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
      RTVEC_ELT (vec, i) = e;

  return gen_rtx_PARALLEL (GET_MODE (src), vec);
/* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
   where SRC is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_DST, or -1 if not
   known.  */

emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
  enum machine_mode m = GET_MODE (orig_dst);

  gcc_assert (GET_CODE (src) == PARALLEL);

  if (!SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
      if (imode == BLKmode)
	dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
      else
	dst = gen_reg_rtx (imode);
      emit_group_store (dst, src, type, ssize);
      if (imode != BLKmode)
	dst = gen_lowpart (GET_MODE (orig_dst), dst);
      emit_move_insn (orig_dst, dst);

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))

  tmps = alloca (sizeof (rtx) * XVECLEN (src, 0));

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < XVECLEN (src, 0); i++)
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      tmps[i] = gen_reg_rtx (GET_MODE (reg));
      emit_move_insn (tmps[i], reg);

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  if (GET_CODE (dst) == PARALLEL)
      /* We can get a PARALLEL dst if there is a conditional expression in
	 a return statement.  In that case, the dst and src are the same,
	 so no action is necessary.  */
      if (rtx_equal_p (dst, src))

      /* It is unclear if we can ever reach here, but we may as well handle
	 it.  Allocate a temporary, and split this into a store/load to/from
	 the temporary.  */

      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
      emit_group_store (temp, src, type, ssize);
      emit_group_load (dst, temp, type, ssize);
  else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
      dst = gen_reg_rtx (GET_MODE (orig_dst));
      /* Make life a bit easier for combine.  */
      emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (src, 0); i++)
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	  /* store_bit_field always takes its value from the lsb.
	     Move the fragment to the lsb if it's not already there.  */
#ifdef BLOCK_REG_PADDING
	      BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
	      == (BYTES_BIG_ENDIAN ? upward : downward)
	      int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	      tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
				      build_int_cst (NULL_TREE, shift),
	  bytelen = ssize - bytepos;

      if (GET_CODE (dst) == CONCAT)
	  if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
	    dest = XEXP (dst, 0);
	  else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
	      bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
	      dest = XEXP (dst, 1);

	      gcc_assert (bytepos == 0 && XVECLEN (src, 0));
	      dest = assign_stack_temp (GET_MODE (dest),
					GET_MODE_SIZE (GET_MODE (dest)), 0);
	      emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),

      /* Optimize the access just a bit.  */
      if (MEM_P (dest)
	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
	      || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
      else
	store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (orig_dst != dst)
    emit_move_insn (orig_dst, dst);
/* Generate code to copy a BLKmode object of TYPE out of a
   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
   is null, a stack temporary is created.  TGTBLK is returned.

   The purpose of this routine is to handle functions that return
   BLKmode structures in registers.  Some machines (the PA for example)
   want to return all small structures in registers regardless of the
   structure's alignment.  */
1999 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2001 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2002 rtx src = NULL, dst = NULL;
2003 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2004 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2008 tgtblk = assign_temp (build_qualified_type (type,
2010 | TYPE_QUAL_CONST)),
2012 preserve_temp_slots (tgtblk);
2015 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2016 into a new pseudo which is a full word. */
2018 if (GET_MODE (srcreg) != BLKmode
2019 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2020 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2022 /* If the structure doesn't take up a whole number of words, see whether
2023 SRCREG is padded on the left or on the right. If it's on the left,
2024 set PADDING_CORRECTION to the number of bits to skip.
2026 In most ABIs, the structure will be returned at the least end of
2027 the register, which translates to right padding on little-endian
2028 targets and left padding on big-endian targets. The opposite
2029 holds if the structure is returned at the most significant
2030 end of the register. */
2031 if (bytes % UNITS_PER_WORD != 0
2032 && (targetm.calls.return_in_msb (type)
2034 : BYTES_BIG_ENDIAN))
2036 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2038 /* Copy the structure BITSIZE bites at a time.
2040 We could probably emit more efficient code for machines which do not use
2041 strict alignment, but it doesn't seem worth the effort at the current
2043 for (bitpos = 0, xbitpos = padding_correction;
2044 bitpos < bytes * BITS_PER_UNIT;
2045 bitpos += bitsize, xbitpos += bitsize)
2047 /* We need a new source operand each time xbitpos is on a
2048 word boundary and when xbitpos == padding_correction
2049 (the first time through). */
2050 if (xbitpos % BITS_PER_WORD == 0
2051 || xbitpos == padding_correction)
2052 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2055 /* We need a new destination operand each time bitpos is on
2056 a word boundary. */
2057 if (bitpos % BITS_PER_WORD == 0)
2058 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2060 /* Use xbitpos for the source extraction (right justified) and
2061 bitpos for the destination store (left justified). */
2062 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2063 extract_bit_field (src, bitsize,
2064 xbitpos % BITS_PER_WORD, 1,
2065 NULL_RTX, word_mode, word_mode));
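/* Illustration (editor's sketch, not part of this file's build): the
   padding-correction arithmetic above, reduced to a standalone program.
   UNITS_PER_WORD_EX and BITS_PER_UNIT_EX are hypothetical stand-ins for
   the target macros; a left-padded (big-endian style) return is assumed. */
#if 0
#include <stdio.h>

#define UNITS_PER_WORD_EX 4
#define BITS_PER_UNIT_EX  8
#define BITS_PER_WORD_EX  (UNITS_PER_WORD_EX * BITS_PER_UNIT_EX)

int
main (void)
{
  unsigned bytes = 6;   /* size of the BLKmode structure being returned */
  unsigned padding_correction = 0;

  /* Mirrors copy_blkmode_from_reg: when the structure does not fill a
     whole number of words and the value is left-padded, skip the
     padding bits in the source.  */
  if (bytes % UNITS_PER_WORD_EX != 0)
    padding_correction
      = BITS_PER_WORD_EX - (bytes % UNITS_PER_WORD_EX) * BITS_PER_UNIT_EX;

  printf ("padding_correction = %u bits\n", padding_correction); /* 16 */
  return 0;
}
#endif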
2071 /* Add a USE expression for REG to the (possibly empty) list pointed
2072 to by CALL_FUSAGE. REG must denote a hard register. */
2075 use_reg (rtx *call_fusage, rtx reg)
2077 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2079 *call_fusage
2080 = gen_rtx_EXPR_LIST (VOIDmode,
2081 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2084 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2085 starting at REGNO. All of these registers must be hard registers. */
2088 use_regs (rtx *call_fusage, int regno, int nregs)
2092 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2094 for (i = 0; i < nregs; i++)
2095 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2098 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2099 PARALLEL REGS. This is for calls that pass values in multiple
2100 non-contiguous locations. The Irix 6 ABI has examples of this. */
2103 use_group_regs (rtx *call_fusage, rtx regs)
2107 for (i = 0; i < XVECLEN (regs, 0); i++)
2109 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2111 /* A NULL entry means the parameter goes both on the stack and in
2112 registers. This can also be a MEM for targets that pass values
2113 partially on the stack and partially in registers. */
2114 if (reg != 0 && REG_P (reg))
2115 use_reg (call_fusage, reg);
2120 /* Determine whether the LEN bytes generated by CONSTFUN can be
2121 stored to memory using several move instructions. CONSTFUNDATA is
2122 a pointer which will be passed as argument in every CONSTFUN call.
2123 ALIGN is maximum alignment we can assume. Return nonzero if a
2124 call to store_by_pieces should succeed. */
2127 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2128 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2129 void *constfundata, unsigned int align)
2131 unsigned HOST_WIDE_INT l;
2132 unsigned int max_size;
2133 HOST_WIDE_INT offset = 0;
2134 enum machine_mode mode, tmode;
2135 enum insn_code icode;
2142 if (! STORE_BY_PIECES_P (len, align))
2145 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2146 if (align >= GET_MODE_ALIGNMENT (tmode))
2147 align = GET_MODE_ALIGNMENT (tmode);
2150 enum machine_mode xmode;
2152 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2154 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2155 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2156 || SLOW_UNALIGNED_ACCESS (tmode, align))
2159 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2162 /* We would first store what we can in the largest integer mode, then go to
2163 successively smaller modes. */
2165 for (reverse = 0;
2166 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2167 reverse++)
2171 max_size = STORE_MAX_PIECES + 1;
2172 while (max_size > 1)
2174 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2175 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2176 if (GET_MODE_SIZE (tmode) < max_size)
2179 if (mode == VOIDmode)
2182 icode = mov_optab->handlers[(int) mode].insn_code;
2183 if (icode != CODE_FOR_nothing
2184 && align >= GET_MODE_ALIGNMENT (mode))
2186 unsigned int size = GET_MODE_SIZE (mode);
2193 cst = (*constfun) (constfundata, offset, mode);
2194 if (!LEGITIMATE_CONSTANT_P (cst))
2204 max_size = GET_MODE_SIZE (mode);
2207 /* The code above should have handled everything. */
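/* Illustration (editor's sketch, not compiled here): the widest-first
   splitting that can_store_by_pieces sizes up, assuming hypothetical
   integer modes of 1/2/4/8 bytes and no alignment restrictions.  */
#if 0
#include <stdio.h>

int
main (void)
{
  unsigned len = 11;       /* bytes to store */
  unsigned max_size = 8;   /* stand-in for STORE_MAX_PIECES */

  while (len > 0)
    {
      unsigned size = max_size;
      while (size > len)   /* widest piece that still fits */
        size /= 2;
      printf ("store %u byte(s)\n", size);  /* 8, then 2, then 1 */
      len -= size;
      max_size = size;     /* like the loop above, never widen again */
    }
  return 0;
}
#endif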
2214 /* Generate several move instructions to store LEN bytes generated by
2215 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2216 pointer which will be passed as argument in every CONSTFUN call.
2217 ALIGN is maximum alignment we can assume.
2218 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
2219 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
2220 stpcpy. */
2223 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2224 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2225 void *constfundata, unsigned int align, int endp)
2227 struct store_by_pieces data;
2231 gcc_assert (endp != 2);
2235 gcc_assert (STORE_BY_PIECES_P (len, align));
2236 data.constfun = constfun;
2237 data.constfundata = constfundata;
2240 store_by_pieces_1 (&data, align);
2245 gcc_assert (!data.reverse);
2250 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2251 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2253 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2256 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2263 to1 = adjust_address (data.to, QImode, data.offset);
2271 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2272 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2275 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2277 struct store_by_pieces data;
2282 data.constfun = clear_by_pieces_1;
2283 data.constfundata = NULL;
2286 store_by_pieces_1 (&data, align);
2289 /* Callback routine for clear_by_pieces.
2290 Return const0_rtx unconditionally. */
2293 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2294 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2295 enum machine_mode mode ATTRIBUTE_UNUSED)
2300 /* Subroutine of clear_by_pieces and store_by_pieces.
2301 Generate several move instructions to store LEN bytes of block TO. (A MEM
2302 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2305 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2306 unsigned int align ATTRIBUTE_UNUSED)
2308 rtx to_addr = XEXP (data->to, 0);
2309 unsigned int max_size = STORE_MAX_PIECES + 1;
2310 enum machine_mode mode = VOIDmode, tmode;
2311 enum insn_code icode;
2314 data->to_addr = to_addr;
2315 data->autinc_to
2316 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2317 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2319 data->explicit_inc_to = 0;
2320 data->reverse
2321 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2323 data->offset = data->len;
2325 /* If storing requires more than two move insns,
2326 copy addresses to registers (to make displacements shorter)
2327 and use post-increment if available. */
2328 if (!data->autinc_to
2329 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2331 /* Determine the main mode we'll be using. */
2332 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2333 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2334 if (GET_MODE_SIZE (tmode) < max_size)
2337 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2339 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2340 data->autinc_to = 1;
2341 data->explicit_inc_to = -1;
2344 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2345 && ! data->autinc_to)
2347 data->to_addr = copy_addr_to_reg (to_addr);
2348 data->autinc_to = 1;
2349 data->explicit_inc_to = 1;
2352 if ( !data->autinc_to && CONSTANT_P (to_addr))
2353 data->to_addr = copy_addr_to_reg (to_addr);
2356 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2357 if (align >= GET_MODE_ALIGNMENT (tmode))
2358 align = GET_MODE_ALIGNMENT (tmode);
2361 enum machine_mode xmode;
2363 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2365 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2366 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2367 || SLOW_UNALIGNED_ACCESS (tmode, align))
2370 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2373 /* First store what we can in the largest integer mode, then go to
2374 successively smaller modes. */
2376 while (max_size > 1)
2378 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2379 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2380 if (GET_MODE_SIZE (tmode) < max_size)
2383 if (mode == VOIDmode)
2386 icode = mov_optab->handlers[(int) mode].insn_code;
2387 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2388 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2390 max_size = GET_MODE_SIZE (mode);
2393 /* The code above should have handled everything. */
2394 gcc_assert (!data->len);
2397 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2398 with move instructions for mode MODE. GENFUN is the gen_... function
2399 to make a move insn for that mode. DATA has all the other info. */
2402 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2403 struct store_by_pieces *data)
2405 unsigned int size = GET_MODE_SIZE (mode);
2408 while (data->len >= size)
2411 data->offset -= size;
2413 if (data->autinc_to)
2414 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2417 to1 = adjust_address (data->to, mode, data->offset);
2419 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2420 emit_insn (gen_add2_insn (data->to_addr,
2421 GEN_INT (-(HOST_WIDE_INT) size)));
2423 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2424 emit_insn ((*genfun) (to1, cst));
2426 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2427 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2429 if (! data->reverse)
2430 data->offset += size;
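/* Illustration (editor's sketch): the offset bookkeeping of
   store_by_pieces_2 in isolation.  When storing in reverse (for
   pre/post-decrement targets) the offset drops before each piece;
   otherwise it grows after each piece.  */
#if 0
#include <stdio.h>

static void
walk (unsigned len, unsigned size, int reverse)
{
  unsigned offset = reverse ? len : 0;

  while (len >= size)
    {
      if (reverse)
        offset -= size;
      printf ("store %u bytes at offset %u\n", size, offset);
      if (!reverse)
        offset += size;
      len -= size;
    }
}

int
main (void)
{
  walk (8, 2, 0);   /* forward: offsets 0, 2, 4, 6 */
  walk (8, 2, 1);   /* reverse: offsets 6, 4, 2, 0 */
  return 0;
}
#endif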
2436 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2437 its length in bytes. */
2440 clear_storage (rtx object, rtx size, enum block_op_methods method)
2442 enum machine_mode mode = GET_MODE (object);
2445 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2447 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2448 just move a zero. Otherwise, do this a piece at a time. */
2449 if (mode != BLKmode
2450 && GET_CODE (size) == CONST_INT
2451 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2453 rtx zero = CONST0_RTX (mode);
2456 emit_move_insn (object, zero);
2460 if (COMPLEX_MODE_P (mode))
2462 zero = CONST0_RTX (GET_MODE_INNER (mode));
2465 write_complex_part (object, zero, 0);
2466 write_complex_part (object, zero, 1);
2472 if (size == const0_rtx)
2475 align = MEM_ALIGN (object);
2477 if (GET_CODE (size) == CONST_INT
2478 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2479 clear_by_pieces (object, INTVAL (size), align);
2480 else if (set_storage_via_setmem (object, size, const0_rtx, align))
2483 return clear_storage_via_libcall (object, size,
2484 method == BLOCK_OP_TAILCALL);
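/* Illustration (editor's sketch): the strategy ladder of clear_storage
   as a plain decision function.  The predicates are stand-ins; the real
   code consults the mode, CLEAR_BY_PIECES_P and the setmem pattern.  */
#if 0
#include <stdio.h>

enum strategy { MOVE_ZERO, BY_PIECES, SETMEM_PATTERN, LIBCALL };

static enum strategy
choose (int is_blk, unsigned size, unsigned mode_size,
        int small_enough, int target_has_setmem)
{
  if (!is_blk && size == mode_size)
    return MOVE_ZERO;        /* one store of CONST0_RTX */
  if (small_enough)
    return BY_PIECES;        /* a handful of explicit stores */
  if (target_has_setmem)
    return SETMEM_PATTERN;   /* target block-set instruction */
  return LIBCALL;            /* fall back to calling memset */
}

int
main (void)
{
  printf ("%d\n", choose (0, 4, 4, 1, 1));    /* 0: MOVE_ZERO */
  printf ("%d\n", choose (1, 1024, 0, 0, 0)); /* 3: LIBCALL */
  return 0;
}
#endif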
2489 /* A subroutine of clear_storage. Expand a call to memset.
2490 Return the return value of memset, 0 otherwise. */
2493 clear_storage_via_libcall (rtx object, rtx size, bool tailcall)
2495 tree call_expr, arg_list, fn, object_tree, size_tree;
2496 enum machine_mode size_mode;
2499 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2500 place those new pseudos into a VAR_DECL and use them later. */
2502 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2504 size_mode = TYPE_MODE (sizetype);
2505 size = convert_to_mode (size_mode, size, 1);
2506 size = copy_to_mode_reg (size_mode, size);
2508 /* It is incorrect to use the libcall calling conventions to call
2509 memset in this context. This could be a user call to memset and
2510 the user may wish to examine the return value from memset. For
2511 targets where libcalls and normal calls have different conventions
2512 for returning pointers, we could end up generating incorrect code. */
2514 object_tree = make_tree (ptr_type_node, object);
2515 size_tree = make_tree (sizetype, size);
2517 fn = clear_storage_libcall_fn (true);
2518 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2519 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
2520 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2522 /* Now we have to build up the CALL_EXPR itself. */
2523 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2524 call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2525 call_expr, arg_list, NULL_TREE);
2526 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2528 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
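/* Illustration (editor's sketch): why the comment above insists on
   normal call conventions rather than libcall conventions.  A user-level
   call to memset may observe the returned pointer, so the expansion must
   deliver it exactly as an ordinary call would.  */
#if 0
#include <stdio.h>
#include <string.h>

int
main (void)
{
  char buf[16];
  char *p = memset (buf, 0, sizeof buf);  /* return value is live */
  printf ("%d\n", p == buf);              /* prints 1 */
  return 0;
}
#endif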
2533 /* A subroutine of clear_storage_via_libcall. Create the tree node
2534 for the function we use for block clears. The first time FOR_CALL
2535 is true, we call assemble_external. */
2537 static GTY(()) tree block_clear_fn;
2540 init_block_clear_fn (const char *asmspec)
2542 if (!block_clear_fn)
2546 fn = get_identifier ("memset");
2547 args = build_function_type_list (ptr_type_node, ptr_type_node,
2548 integer_type_node, sizetype,
2551 fn = build_decl (FUNCTION_DECL, fn, args);
2552 DECL_EXTERNAL (fn) = 1;
2553 TREE_PUBLIC (fn) = 1;
2554 DECL_ARTIFICIAL (fn) = 1;
2555 TREE_NOTHROW (fn) = 1;
2557 block_clear_fn = fn;
2561 set_user_assembler_name (block_clear_fn, asmspec);
2565 clear_storage_libcall_fn (int for_call)
2567 static bool emitted_extern;
2569 if (!block_clear_fn)
2570 init_block_clear_fn (NULL);
2572 if (for_call && !emitted_extern)
2574 emitted_extern = true;
2575 make_decl_rtl (block_clear_fn);
2576 assemble_external (block_clear_fn);
2579 return block_clear_fn;
2582 /* Expand a setmem pattern; return true if successful. */
2585 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align)
2587 /* Try the most limited insn first, because there's no point
2588 including more than one in the machine description unless
2589 the more limited one has some advantage. */
2591 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2592 enum machine_mode mode;
2594 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2595 mode = GET_MODE_WIDER_MODE (mode))
2597 enum insn_code code = setmem_optab[(int) mode];
2598 insn_operand_predicate_fn pred;
2600 if (code != CODE_FOR_nothing
2601 /* We don't need MODE to be narrower than
2602 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2603 the mode mask, as it is returned by the macro, it will
2604 definitely be less than the actual mode mask. */
2605 && ((GET_CODE (size) == CONST_INT
2606 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2607 <= (GET_MODE_MASK (mode) >> 1)))
2608 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2609 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2610 || (*pred) (object, BLKmode))
2611 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
2612 || (*pred) (opalign, VOIDmode)))
2615 rtx last = get_last_insn ();
2618 opsize = convert_to_mode (mode, size, 1);
2619 pred = insn_data[(int) code].operand[1].predicate;
2620 if (pred != 0 && ! (*pred) (opsize, mode))
2621 opsize = copy_to_mode_reg (mode, opsize);
2623 opchar = convert_to_mode (mode, val, 1);
2624 pred = insn_data[(int) code].operand[2].predicate;
2625 if (pred != 0 && ! (*pred) (opchar, mode))
2626 opchar = copy_to_mode_reg (mode, opchar);
2628 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign);
2635 delete_insns_since (last);
2643 /* Write to one of the components of the complex value CPLX. Write VAL to
2644 the real part if IMAG_P is false, and the imaginary part if it's true. */
2647 write_complex_part (rtx cplx, rtx val, bool imag_p)
2649 enum machine_mode cmode;
2650 enum machine_mode imode;
2653 if (GET_CODE (cplx) == CONCAT)
2655 emit_move_insn (XEXP (cplx, imag_p), val);
2659 cmode = GET_MODE (cplx);
2660 imode = GET_MODE_INNER (cmode);
2661 ibitsize = GET_MODE_BITSIZE (imode);
2663 /* For MEMs simplify_gen_subreg may generate an invalid new address
2664 because, e.g., the original address is considered mode-dependent
2665 by the target, which restricts simplify_subreg from invoking
2666 adjust_address_nv. Instead of preparing fallback support for an
2667 invalid address, we call adjust_address_nv directly. */
2669 emit_move_insn (adjust_address_nv (cplx, imode,
2670 imag_p ? GET_MODE_SIZE (imode) : 0),
2673 /* If the sub-object is at least word sized, then we know that subregging
2674 will work. This special case is important, since store_bit_field
2675 wants to operate on integer modes, and there's rarely an OImode to
2676 correspond to TCmode. */
2677 if (ibitsize >= BITS_PER_WORD
2678 /* For hard regs we have exact predicates. Assume we can split
2679 the original object if it spans an even number of hard regs.
2680 This special case is important for SCmode on 64-bit platforms
2681 where the natural size of floating-point regs is 32-bit. */
2683 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2684 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2686 rtx part = simplify_gen_subreg (imode, cplx, cmode,
2687 imag_p ? GET_MODE_SIZE (imode) : 0);
2690 emit_move_insn (part, val);
2694 /* simplify_gen_subreg may fail for sub-word MEMs. */
2695 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2698 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val);
2701 /* Extract one of the components of the complex value CPLX. Extract the
2702 real part if IMAG_P is false, and the imaginary part if it's true. */
2705 read_complex_part (rtx cplx, bool imag_p)
2707 enum machine_mode cmode, imode;
2710 if (GET_CODE (cplx) == CONCAT)
2711 return XEXP (cplx, imag_p);
2713 cmode = GET_MODE (cplx);
2714 imode = GET_MODE_INNER (cmode);
2715 ibitsize = GET_MODE_BITSIZE (imode);
2717 /* Special case reads from complex constants that got spilled to memory. */
2718 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
2720 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2721 if (decl && TREE_CODE (decl) == COMPLEX_CST)
2723 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
2724 if (CONSTANT_CLASS_P (part))
2725 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
2729 /* For MEMs simplify_gen_subreg may generate an invalid new address
2730 because, e.g., the original address is considered mode-dependent
2731 by the target, which restricts simplify_subreg from invoking
2732 adjust_address_nv. Instead of preparing fallback support for an
2733 invalid address, we call adjust_address_nv directly. */
2735 return adjust_address_nv (cplx, imode,
2736 imag_p ? GET_MODE_SIZE (imode) : 0);
2738 /* If the sub-object is at least word sized, then we know that subregging
2739 will work. This special case is important, since extract_bit_field
2740 wants to operate on integer modes, and there's rarely an OImode to
2741 correspond to TCmode. */
2742 if (ibitsize >= BITS_PER_WORD
2743 /* For hard regs we have exact predicates. Assume we can split
2744 the original object if it spans an even number of hard regs.
2745 This special case is important for SCmode on 64-bit platforms
2746 where the natural size of floating-point regs is 32-bit. */
2748 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2749 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2751 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
2752 imag_p ? GET_MODE_SIZE (imode) : 0);
2756 /* simplify_gen_subreg may fail for sub-word MEMs. */
2757 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2760 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
2761 true, NULL_RTX, imode, imode);
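/* Illustration (editor's sketch): the layout contract behind the
   complex-part helpers.  The real part lives at byte offset 0 and the
   imaginary part at offset GET_MODE_SIZE (imode), independent of the
   target's endianness; a double[2] stands in for a DCmode object.  */
#if 0
#include <stdio.h>

int
main (void)
{
  double z[2];   /* z[0] ~ offset 0, z[1] ~ offset sizeof (double) */

  z[0] = 1.0;    /* like write_complex_part (cplx, val, false) */
  z[1] = 2.0;    /* like write_complex_part (cplx, val, true)  */

  /* like read_complex_part (cplx, false) / (cplx, true) */
  printf ("re=%g im=%g\n", z[0], z[1]);
  return 0;
}
#endif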
2764 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
2765 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
2766 represented in NEW_MODE. If FORCE is true, this will never happen, as
2767 we'll force-create a SUBREG if needed. */
2770 emit_move_change_mode (enum machine_mode new_mode,
2771 enum machine_mode old_mode, rtx x, bool force)
2775 if (reload_in_progress && MEM_P (x))
2777 /* We can't use gen_lowpart here because it may call change_address
2778 which is not appropriate if we were called when a reload was in
2779 progress. We don't have to worry about changing the address since
2780 the size in bytes is supposed to be the same. Copy the MEM to
2781 change the mode and move any substitutions from the old MEM to
2782 the new one. */
2784 ret = adjust_address_nv (x, new_mode, 0);
2785 copy_replacements (x, ret);
2789 /* Note that we do want simplify_subreg's behavior of validating
2790 that the new mode is ok for a hard register. If we were to use
2791 simplify_gen_subreg, we would create the subreg, but would
2792 probably run into the target not being able to implement it. */
2793 /* Except, of course, when FORCE is true, when this is exactly what
2794 we want. Which is needed for CCmodes on some targets. */
2796 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
2798 ret = simplify_subreg (new_mode, x, old_mode, 0);
2804 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
2805 an integer mode of the same size as MODE. Returns the instruction
2806 emitted, or NULL if such a move could not be generated. */
2809 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y)
2811 enum machine_mode imode;
2812 enum insn_code code;
2814 /* There must exist a mode of the exact size we require. */
2815 imode = int_mode_for_mode (mode);
2816 if (imode == BLKmode)
2819 /* The target must support moves in this mode. */
2820 code = mov_optab->handlers[imode].insn_code;
2821 if (code == CODE_FOR_nothing)
2824 x = emit_move_change_mode (imode, mode, x, false);
2827 y = emit_move_change_mode (imode, mode, y, false);
2830 return emit_insn (GEN_FCN (code) (x, y));
2833 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
2834 Return an equivalent MEM that does not use an auto-increment. */
2837 emit_move_resolve_push (enum machine_mode mode, rtx x)
2839 enum rtx_code code = GET_CODE (XEXP (x, 0));
2840 HOST_WIDE_INT adjust;
2843 adjust = GET_MODE_SIZE (mode);
2844 #ifdef PUSH_ROUNDING
2845 adjust = PUSH_ROUNDING (adjust);
2847 if (code == PRE_DEC || code == POST_DEC)
2850 /* Do not use anti_adjust_stack, since we don't want to update
2851 stack_pointer_delta. */
2852 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
2853 GEN_INT (adjust), stack_pointer_rtx,
2854 0, OPTAB_LIB_WIDEN);
2855 if (temp != stack_pointer_rtx)
2856 emit_move_insn (stack_pointer_rtx, temp);
2862 temp = stack_pointer_rtx;
2865 temp = plus_constant (stack_pointer_rtx, -GET_MODE_SIZE (mode));
2868 temp = plus_constant (stack_pointer_rtx, GET_MODE_SIZE (mode));
2874 return replace_equiv_address (x, temp);
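/* Illustration (editor's sketch): the address arithmetic performed by
   emit_move_resolve_push once the stack adjustment has been made
   explicit, assuming a downward-growing stack.  A PRE_DEC push stores
   at the new stack pointer; a POST_DEC push stores at the old one.  */
#if 0
#include <stdio.h>

int
main (void)
{
  long sp = 1000;      /* stand-in stack pointer value */
  unsigned size = 8;   /* GET_MODE_SIZE of the pushed mode */

  long sp_after      = sp - size;        /* explicit adjustment */
  long addr_pre_dec  = sp_after;         /* *--sp semantics */
  long addr_post_dec = sp_after + size;  /* *sp-- semantics */

  printf ("PRE_DEC stores at %ld, POST_DEC at %ld\n",
          addr_pre_dec, addr_post_dec);  /* 992 and 1000 */
  return 0;
}
#endif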
2877 /* A subroutine of emit_move_complex. Generate a move from Y into X.
2878 X is known to satisfy push_operand, and MODE is known to be complex.
2879 Returns the last instruction emitted. */
2882 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
2884 enum machine_mode submode = GET_MODE_INNER (mode);
2887 #ifdef PUSH_ROUNDING
2888 unsigned int submodesize = GET_MODE_SIZE (submode);
2890 /* In case we output to the stack, but the size is smaller than the
2891 machine can push exactly, we need to use move instructions. */
2892 if (PUSH_ROUNDING (submodesize) != submodesize)
2894 x = emit_move_resolve_push (mode, x);
2895 return emit_move_insn (x, y);
2899 /* Note that the real part always precedes the imag part in memory
2900 regardless of machine's endianness. */
2901 switch (GET_CODE (XEXP (x, 0)))
2915 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2916 read_complex_part (y, imag_first));
2917 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2918 read_complex_part (y, !imag_first));
2921 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
2922 MODE is known to be complex. Returns the last instruction emitted. */
2925 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
2929 /* Need to take special care for pushes, to maintain proper ordering
2930 of the data, and possibly extra padding. */
2931 if (push_operand (x, mode))
2932 return emit_move_complex_push (mode, x, y);
2934 /* See if we can coerce the target into moving both values at once. */
2936 /* Move floating point as parts. */
2937 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
2938 && mov_optab->handlers[GET_MODE_INNER (mode)].insn_code != CODE_FOR_nothing)
2940 /* Not possible if the values are inherently not adjacent. */
2941 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
2943 /* Is possible if both are registers (or subregs of registers). */
2944 else if (register_operand (x, mode) && register_operand (y, mode))
2946 /* If one of the operands is a memory, and alignment constraints
2947 are friendly enough, we may be able to do combined memory operations.
2948 We do not attempt this if Y is a constant because that combination is
2949 usually better with the by-parts thing below. */
2950 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
2951 && (!STRICT_ALIGNMENT
2952 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
2961 /* For memory to memory moves, optimal behavior can be had with the
2962 existing block move logic. */
2963 if (MEM_P (x) && MEM_P (y))
2965 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
2966 BLOCK_OP_NO_LIBCALL);
2967 return get_last_insn ();
2970 ret = emit_move_via_integer (mode, x, y);
2975 /* Show the output dies here. This is necessary for SUBREGs
2976 of pseudos since we cannot track their lifetimes correctly;
2977 hard regs shouldn't appear here except as return values. */
2978 if (!reload_completed && !reload_in_progress
2979 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
2980 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2982 write_complex_part (x, read_complex_part (y, false), false);
2983 write_complex_part (x, read_complex_part (y, true), true);
2984 return get_last_insn ();
2987 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
2988 MODE is known to be MODE_CC. Returns the last instruction emitted. */
2991 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
2995 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
2998 enum insn_code code = mov_optab->handlers[CCmode].insn_code;
2999 if (code != CODE_FOR_nothing)
3001 x = emit_move_change_mode (CCmode, mode, x, true);
3002 y = emit_move_change_mode (CCmode, mode, y, true);
3003 return emit_insn (GEN_FCN (code) (x, y));
3007 /* Otherwise, find the MODE_INT mode of the same width. */
3008 ret = emit_move_via_integer (mode, x, y);
3009 gcc_assert (ret != NULL);
3013 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3014 MODE is any multi-word or full-word mode that lacks a move_insn
3015 pattern. Note that you will get better code if you define such
3016 patterns, even if they must turn into multiple assembler instructions. */
3019 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3026 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3028 /* If X is a push on the stack, do the push now and replace
3029 X with a reference to the stack pointer. */
3030 if (push_operand (x, mode))
3031 x = emit_move_resolve_push (mode, x);
3033 /* If we are in reload, see if either operand is a MEM whose address
3034 is scheduled for replacement. */
3035 if (reload_in_progress && MEM_P (x)
3036 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3037 x = replace_equiv_address_nv (x, inner);
3038 if (reload_in_progress && MEM_P (y)
3039 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3040 y = replace_equiv_address_nv (y, inner);
3044 need_clobber = false;
3046 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3049 rtx xpart = operand_subword (x, i, 1, mode);
3050 rtx ypart = operand_subword (y, i, 1, mode);
3052 /* If we can't get a part of Y, put Y into memory if it is a
3053 constant. Otherwise, force it into a register. Then we must
3054 be able to get a part of Y. */
3055 if (ypart == 0 && CONSTANT_P (y))
3057 y = force_const_mem (mode, y);
3058 ypart = operand_subword (y, i, 1, mode);
3060 else if (ypart == 0)
3061 ypart = operand_subword_force (y, i, mode);
3063 gcc_assert (xpart && ypart);
3065 need_clobber |= (GET_CODE (xpart) == SUBREG);
3067 last_insn = emit_move_insn (xpart, ypart);
3073 /* Show the output dies here. This is necessary for SUBREGs
3074 of pseudos since we cannot track their lifetimes correctly;
3075 hard regs shouldn't appear here except as return values.
3076 We never want to emit such a clobber after reload. */
3078 && ! (reload_in_progress || reload_completed)
3079 && need_clobber != 0)
3080 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
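/* Illustration (editor's sketch): the word-by-word copy that
   emit_move_multi_word expands to, assuming a hypothetical 4-byte word
   and a 12-byte value; each memcpy stands in for one word-mode move.  */
#if 0
#include <stdio.h>
#include <string.h>

int
main (void)
{
  unsigned char src[12], dst[12];
  unsigned units_per_word = 4;
  unsigned nwords = (sizeof src + units_per_word - 1) / units_per_word;
  unsigned i;

  memset (src, 0xab, sizeof src);
  for (i = 0; i < nwords; i++)   /* one move insn per word */
    memcpy (dst + i * units_per_word, src + i * units_per_word,
            units_per_word);

  printf ("%d\n", memcmp (dst, src, sizeof src) == 0);  /* prints 1 */
  return 0;
}
#endif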
3087 /* Low level part of emit_move_insn.
3088 Called just like emit_move_insn, but assumes X and Y
3089 are basically valid. */
3092 emit_move_insn_1 (rtx x, rtx y)
3094 enum machine_mode mode = GET_MODE (x);
3095 enum insn_code code;
3097 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3099 code = mov_optab->handlers[mode].insn_code;
3100 if (code != CODE_FOR_nothing)
3101 return emit_insn (GEN_FCN (code) (x, y));
3103 /* Expand complex moves by moving real part and imag part. */
3104 if (COMPLEX_MODE_P (mode))
3105 return emit_move_complex (mode, x, y);
3107 if (GET_MODE_CLASS (mode) == MODE_CC)
3108 return emit_move_ccmode (mode, x, y);
3110 /* Try using a move pattern for the corresponding integer mode. This is
3111 only safe when simplify_subreg can convert MODE constants into integer
3112 constants. At present, it can only do this reliably if the value
3113 fits within a HOST_WIDE_INT. */
3114 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3116 rtx ret = emit_move_via_integer (mode, x, y);
3121 return emit_move_multi_word (mode, x, y);
3124 /* Generate code to copy Y into X.
3125 Both Y and X must have the same mode, except that
3126 Y can be a constant with VOIDmode.
3127 This mode cannot be BLKmode; use emit_block_move for that.
3129 Return the last instruction emitted. */
3132 emit_move_insn (rtx x, rtx y)
3134 enum machine_mode mode = GET_MODE (x);
3135 rtx y_cst = NULL_RTX;
3138 gcc_assert (mode != BLKmode
3139 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3144 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3145 && (last_insn = compress_float_constant (x, y)))
3150 if (!LEGITIMATE_CONSTANT_P (y))
3152 y = force_const_mem (mode, y);
3154 /* If the target's cannot_force_const_mem prevented the spill,
3155 assume that the target's move expanders will also take care
3156 of the non-legitimate constant. */
3162 /* If X or Y are memory references, verify that their addresses are valid
3163 for the machine. */
3165 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3166 && ! push_operand (x, GET_MODE (x)))
3168 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
3169 x = validize_mem (x);
3172 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3174 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
3175 y = validize_mem (y);
3177 gcc_assert (mode != BLKmode);
3179 last_insn = emit_move_insn_1 (x, y);
3181 if (y_cst && REG_P (x)
3182 && (set = single_set (last_insn)) != NULL_RTX
3183 && SET_DEST (set) == x
3184 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3185 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3190 /* If Y is representable exactly in a narrower mode, and the target can
3191 perform the extension directly from constant or memory, then emit the
3192 move as an extension. */
3195 compress_float_constant (rtx x, rtx y)
3197 enum machine_mode dstmode = GET_MODE (x);
3198 enum machine_mode orig_srcmode = GET_MODE (y);
3199 enum machine_mode srcmode;
3202 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3204 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3205 srcmode != orig_srcmode;
3206 srcmode = GET_MODE_WIDER_MODE (srcmode))
3209 rtx trunc_y, last_insn;
3211 /* Skip if the target can't extend this way. */
3212 ic = can_extend_p (dstmode, srcmode, 0);
3213 if (ic == CODE_FOR_nothing)
3216 /* Skip if the narrowed value isn't exact. */
3217 if (! exact_real_truncate (srcmode, &r))
3220 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3222 if (LEGITIMATE_CONSTANT_P (trunc_y))
3224 /* Skip if the target needs extra instructions to perform
3225 the extension. */
3226 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3229 else if (float_extend_from_mem[dstmode][srcmode])
3230 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3234 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3235 last_insn = get_last_insn ();
3238 set_unique_reg_note (last_insn, REG_EQUAL, y);
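/* Illustration (editor's sketch): the exactness test that gates
   compress_float_constant.  A double constant may be materialized as a
   narrower float load plus an extension only when truncating to float
   and extending back reproduces the original value.  */
#if 0
#include <stdio.h>

static int
exact_in_float (double d)
{
  return (double) (float) d == d;
}

int
main (void)
{
  printf ("%d\n", exact_in_float (1.5));  /* 1: exact, can compress */
  printf ("%d\n", exact_in_float (0.1));  /* 0: inexact, keep double */
  return 0;
}
#endif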
3246 /* Pushing data onto the stack. */
3248 /* Push a block of length SIZE (perhaps variable)
3249 and return an rtx to address the beginning of the block.
3250 The value may be virtual_outgoing_args_rtx.
3252 EXTRA is the number of bytes of padding to push in addition to SIZE.
3253 BELOW nonzero means this padding comes at low addresses;
3254 otherwise, the padding comes at high addresses. */
3257 push_block (rtx size, int extra, int below)
3261 size = convert_modes (Pmode, ptr_mode, size, 1);
3262 if (CONSTANT_P (size))
3263 anti_adjust_stack (plus_constant (size, extra));
3264 else if (REG_P (size) && extra == 0)
3265 anti_adjust_stack (size);
3268 temp = copy_to_mode_reg (Pmode, size);
3270 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3271 temp, 0, OPTAB_LIB_WIDEN);
3272 anti_adjust_stack (temp);
3275 #ifndef STACK_GROWS_DOWNWARD
3281 temp = virtual_outgoing_args_rtx;
3282 if (extra != 0 && below)
3283 temp = plus_constant (temp, extra);
3287 if (GET_CODE (size) == CONST_INT)
3288 temp = plus_constant (virtual_outgoing_args_rtx,
3289 -INTVAL (size) - (below ? 0 : extra));
3290 else if (extra != 0 && !below)
3291 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3292 negate_rtx (Pmode, plus_constant (size, extra)));
3294 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3295 negate_rtx (Pmode, size));
3298 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3301 #ifdef PUSH_ROUNDING
3303 /* Emit single push insn. */
3306 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3309 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3311 enum insn_code icode;
3312 insn_operand_predicate_fn pred;
3314 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3315 /* If there is a push pattern, use it. Otherwise try the old way of throwing
3316 a MEM representing the push operation to the move expander. */
3317 icode = push_optab->handlers[(int) mode].insn_code;
3318 if (icode != CODE_FOR_nothing)
3320 if (((pred = insn_data[(int) icode].operand[0].predicate)
3321 && !((*pred) (x, mode))))
3322 x = force_reg (mode, x);
3323 emit_insn (GEN_FCN (icode) (x));
3326 if (GET_MODE_SIZE (mode) == rounded_size)
3327 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3328 /* If we are to pad downward, adjust the stack pointer first and
3329 then store X into the stack location using an offset. This is
3330 because emit_move_insn does not know how to pad; it does not have
3331 such information. */
3332 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3334 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3335 HOST_WIDE_INT offset;
3337 emit_move_insn (stack_pointer_rtx,
3338 expand_binop (Pmode,
3339 #ifdef STACK_GROWS_DOWNWARD
3345 GEN_INT (rounded_size),
3346 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3348 offset = (HOST_WIDE_INT) padding_size;
3349 #ifdef STACK_GROWS_DOWNWARD
3350 if (STACK_PUSH_CODE == POST_DEC)
3351 /* We have already decremented the stack pointer, so get the
3352 previous value. */
3353 offset += (HOST_WIDE_INT) rounded_size;
3355 if (STACK_PUSH_CODE == POST_INC)
3356 /* We have already incremented the stack pointer, so get the
3357 previous value. */
3358 offset -= (HOST_WIDE_INT) rounded_size;
3360 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3364 #ifdef STACK_GROWS_DOWNWARD
3365 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3366 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3367 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3369 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3370 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3371 GEN_INT (rounded_size));
3373 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3376 dest = gen_rtx_MEM (mode, dest_addr);
3380 set_mem_attributes (dest, type, 1);
3382 if (flag_optimize_sibling_calls)
3383 /* Function incoming arguments may overlap with sibling call
3384 outgoing arguments and we cannot allow reordering of reads
3385 from function arguments with stores to outgoing arguments
3386 of sibling calls. */
3387 set_mem_alias_set (dest, 0);
3389 emit_move_insn (dest, x);
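/* Illustration (editor's sketch): the downward-padding offset computed
   by emit_single_push_insn, assuming a downward-growing stack, a
   hypothetical PUSH_ROUNDING to 4 bytes and STACK_PUSH_CODE == POST_DEC. */
#if 0
#include <stdio.h>

int
main (void)
{
  unsigned mode_size = 2;       /* bytes actually pushed */
  unsigned rounded_size = 4;    /* PUSH_ROUNDING (mode_size) stand-in */

  long offset = rounded_size - mode_size;  /* padding below the datum */
  offset += rounded_size;  /* POST_DEC already moved SP; compensate */

  printf ("store at sp + %ld\n", offset);  /* sp + 6 */
  return 0;
}
#endif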
3393 /* Generate code to push X onto the stack, assuming it has mode MODE and
3394 type TYPE.
3395 MODE is redundant except when X is a CONST_INT (since they don't
3396 carry mode info).
3397 SIZE is an rtx for the size of data to be copied (in bytes),
3398 needed only if X is BLKmode.
3400 ALIGN (in bits) is maximum alignment we can assume.
3402 If PARTIAL and REG are both nonzero, then copy that many of the first
3403 bytes of X into registers starting with REG, and push the rest of X.
3404 The amount of space pushed is decreased by PARTIAL bytes.
3405 REG must be a hard register in this case.
3406 If REG is zero but PARTIAL is not, take all other actions for an
3407 argument partially in registers, but do not actually load any
3408 registers.
3410 EXTRA is the amount in bytes of extra space to leave next to this arg.
3411 This is ignored if an argument block has already been allocated.
3413 On a machine that lacks real push insns, ARGS_ADDR is the address of
3414 the bottom of the argument block for this call. We use indexing off there
3415 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3416 argument block has not been preallocated.
3418 ARGS_SO_FAR is the size of args previously pushed for this call.
3420 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3421 for arguments passed in registers. If nonzero, it will be the number
3422 of bytes required. */
3425 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3426 unsigned int align, int partial, rtx reg, int extra,
3427 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3431 enum direction stack_direction
3432 #ifdef STACK_GROWS_DOWNWARD
3438 /* Decide where to pad the argument: `downward' for below,
3439 `upward' for above, or `none' for don't pad it.
3440 Default is below for small data on big-endian machines; else above. */
3441 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3443 /* Invert direction if stack is post-decrement.
3444 FIXME: why? */
3445 if (STACK_PUSH_CODE == POST_DEC)
3446 if (where_pad != none)
3447 where_pad = (where_pad == downward ? upward : downward);
3451 if (mode == BLKmode)
3453 /* Copy a block into the stack, entirely or partially. */
3460 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3461 used = partial - offset;
3465 /* USED is now the # of bytes we need not copy to the stack
3466 because registers will take care of them. */
3469 xinner = adjust_address (xinner, BLKmode, used);
3471 /* If the partial register-part of the arg counts in its stack size,
3472 skip the part of stack space corresponding to the registers.
3473 Otherwise, start copying to the beginning of the stack space,
3474 by setting SKIP to 0. */
3475 skip = (reg_parm_stack_space == 0) ? 0 : used;
3477 #ifdef PUSH_ROUNDING
3478 /* Do it with several push insns if that doesn't take lots of insns
3479 and if there is no difficulty with push insns that skip bytes
3480 on the stack for alignment purposes. */
3483 && GET_CODE (size) == CONST_INT
3485 && MEM_ALIGN (xinner) >= align
3486 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3487 /* Here we avoid the case of a structure whose weak alignment
3488 forces many pushes of a small amount of data,
3489 and such small pushes do rounding that causes trouble. */
3490 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3491 || align >= BIGGEST_ALIGNMENT
3492 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3493 == (align / BITS_PER_UNIT)))
3494 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3496 /* Push padding now if padding above and stack grows down,
3497 or if padding below and stack grows up.
3498 But if space already allocated, this has already been done. */
3499 if (extra && args_addr == 0
3500 && where_pad != none && where_pad != stack_direction)
3501 anti_adjust_stack (GEN_INT (extra));
3503 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3506 #endif /* PUSH_ROUNDING */
3510 /* Otherwise make space on the stack and copy the data
3511 to the address of that space. */
3513 /* Deduct words put into registers from the size we must copy. */
3516 if (GET_CODE (size) == CONST_INT)
3517 size = GEN_INT (INTVAL (size) - used);
3519 size = expand_binop (GET_MODE (size), sub_optab, size,
3520 GEN_INT (used), NULL_RTX, 0,
3524 /* Get the address of the stack space.
3525 In this case, we do not deal with EXTRA separately.
3526 A single stack adjust will do. */
3529 temp = push_block (size, extra, where_pad == downward);
3532 else if (GET_CODE (args_so_far) == CONST_INT)
3533 temp = memory_address (BLKmode,
3534 plus_constant (args_addr,
3535 skip + INTVAL (args_so_far)));
3537 temp = memory_address (BLKmode,
3538 plus_constant (gen_rtx_PLUS (Pmode,
3543 if (!ACCUMULATE_OUTGOING_ARGS)
3545 /* If the source is referenced relative to the stack pointer,
3546 copy it to another register to stabilize it. We do not need
3547 to do this if we know that we won't be changing sp. */
3549 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3550 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3551 temp = copy_to_reg (temp);
3554 target = gen_rtx_MEM (BLKmode, temp);
3556 /* We do *not* set_mem_attributes here, because incoming arguments
3557 may overlap with sibling call outgoing arguments and we cannot
3558 allow reordering of reads from function arguments with stores
3559 to outgoing arguments of sibling calls. We do, however, want
3560 to record the alignment of the stack slot. */
3561 /* ALIGN may well be better aligned than TYPE, e.g. due to
3562 PARM_BOUNDARY. Assume the caller isn't lying. */
3563 set_mem_align (target, align);
3565 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3568 else if (partial > 0)
3570 /* Scalar partly in registers. */
3572 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3575 /* # bytes of start of argument
3576 that we must make space for but need not store. */
3577 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3578 int args_offset = INTVAL (args_so_far);
3581 /* Push padding now if padding above and stack grows down,
3582 or if padding below and stack grows up.
3583 But if space already allocated, this has already been done. */
3584 if (extra && args_addr == 0
3585 && where_pad != none && where_pad != stack_direction)
3586 anti_adjust_stack (GEN_INT (extra));
3588 /* If we make space by pushing it, we might as well push
3589 the real data. Otherwise, we can leave OFFSET nonzero
3590 and leave the space uninitialized. */
3594 /* Now NOT_STACK gets the number of words that we don't need to
3595 allocate on the stack. Convert OFFSET to words too. */
3596 not_stack = (partial - offset) / UNITS_PER_WORD;
3597 offset /= UNITS_PER_WORD;
3599 /* If the partial register-part of the arg counts in its stack size,
3600 skip the part of stack space corresponding to the registers.
3601 Otherwise, start copying to the beginning of the stack space,
3602 by setting SKIP to 0. */
3603 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3605 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3606 x = validize_mem (force_const_mem (mode, x));
3608 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3609 SUBREGs of such registers are not allowed. */
3610 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3611 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3612 x = copy_to_reg (x);
3614 /* Loop over all the words allocated on the stack for this arg. */
3615 /* We can do it by words, because any scalar bigger than a word
3616 has a size a multiple of a word. */
3617 #ifndef PUSH_ARGS_REVERSED
3618 for (i = not_stack; i < size; i++)
3620 for (i = size - 1; i >= not_stack; i--)
3622 if (i >= not_stack + offset)
3623 emit_push_insn (operand_subword_force (x, i, mode),
3624 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3626 GEN_INT (args_offset + ((i - not_stack + skip)
3628 reg_parm_stack_space, alignment_pad);
3635 /* Push padding now if padding above and stack grows down,
3636 or if padding below and stack grows up.
3637 But if space already allocated, this has already been done. */
3638 if (extra && args_addr == 0
3639 && where_pad != none && where_pad != stack_direction)
3640 anti_adjust_stack (GEN_INT (extra));
3642 #ifdef PUSH_ROUNDING
3643 if (args_addr == 0 && PUSH_ARGS)
3644 emit_single_push_insn (mode, x, type);
3648 if (GET_CODE (args_so_far) == CONST_INT)
3650 = memory_address (mode,
3651 plus_constant (args_addr,
3652 INTVAL (args_so_far)));
3654 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3656 dest = gen_rtx_MEM (mode, addr);
3658 /* We do *not* set_mem_attributes here, because incoming arguments
3659 may overlap with sibling call outgoing arguments and we cannot
3660 allow reordering of reads from function arguments with stores
3661 to outgoing arguments of sibling calls. We do, however, want
3662 to record the alignment of the stack slot. */
3663 /* ALIGN may well be better aligned than TYPE, e.g. due to
3664 PARM_BOUNDARY. Assume the caller isn't lying. */
3665 set_mem_align (dest, align);
3667 emit_move_insn (dest, x);
3671 /* If part should go in registers, copy that part
3672 into the appropriate registers. Do this now, at the end,
3673 since mem-to-mem copies above may do function calls. */
3674 if (partial > 0 && reg != 0)
3676 /* Handle calls that pass values in multiple non-contiguous locations.
3677 The Irix 6 ABI has examples of this. */
3678 if (GET_CODE (reg) == PARALLEL)
3679 emit_group_load (reg, x, type, -1);
3682 gcc_assert (partial % UNITS_PER_WORD == 0);
3683 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
3687 if (extra && args_addr == 0 && where_pad == stack_direction)
3688 anti_adjust_stack (GEN_INT (extra));
3690 if (alignment_pad && args_addr == 0)
3691 anti_adjust_stack (alignment_pad);
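/* Illustration (editor's sketch): the PARTIAL bookkeeping in the scalar
   branch above, assuming a hypothetical 4-byte word and an 8-byte
   PARM_BOUNDARY.  OFFSET is space reserved but not stored; NOT_STACK is
   the words that stay entirely in registers.  */
#if 0
#include <stdio.h>

int
main (void)
{
  int partial = 12;         /* bytes of the arg passed in registers */
  int boundary_bytes = 8;   /* PARM_BOUNDARY / BITS_PER_UNIT */
  int units_per_word = 4;

  int offset = partial % boundary_bytes;                 /* 4 bytes */
  int not_stack = (partial - offset) / units_per_word;   /* 2 words */
  offset /= units_per_word;                              /* 1 word  */

  printf ("not_stack=%d words, offset=%d word(s)\n", not_stack, offset);
  return 0;
}
#endif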
3694 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3695 operations. */
3698 get_subtarget (rtx x)
3702 /* Only registers can be subtargets. */
3704 /* Don't use hard regs to avoid extending their life. */
3705 || REGNO (x) < FIRST_PSEUDO_REGISTER
3709 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
3710 FIELD is a bitfield. Returns true if the optimization was successful,
3711 and there's nothing else to do. */
3714 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
3715 unsigned HOST_WIDE_INT bitpos,
3716 enum machine_mode mode1, rtx str_rtx,
3719 enum machine_mode str_mode = GET_MODE (str_rtx);
3720 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
3725 if (mode1 != VOIDmode
3726 || bitsize >= BITS_PER_WORD
3727 || str_bitsize > BITS_PER_WORD
3728 || TREE_SIDE_EFFECTS (to)
3729 || TREE_THIS_VOLATILE (to))
3733 if (!BINARY_CLASS_P (src)
3734 || TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
3737 op0 = TREE_OPERAND (src, 0);
3738 op1 = TREE_OPERAND (src, 1);
3741 if (!operand_equal_p (to, op0, 0))
3744 if (MEM_P (str_rtx))
3746 unsigned HOST_WIDE_INT offset1;
3748 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
3749 str_mode = word_mode;
3750 str_mode = get_best_mode (bitsize, bitpos,
3751 MEM_ALIGN (str_rtx), str_mode, 0);
3752 if (str_mode == VOIDmode)
3754 str_bitsize = GET_MODE_BITSIZE (str_mode);
3756 offset1 = bitpos;
3757 bitpos %= str_bitsize;
3758 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
3759 str_rtx = adjust_address (str_rtx, str_mode, offset1);
3761 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
3764 /* If the bit field covers the whole REG/MEM, store_field
3765 will likely generate better code. */
3766 if (bitsize >= str_bitsize)
3769 /* We can't handle fields split across multiple entities. */
3770 if (bitpos + bitsize > str_bitsize)
3773 if (BYTES_BIG_ENDIAN)
3774 bitpos = str_bitsize - bitpos - bitsize;
3776 switch (TREE_CODE (src))
3780 /* For now, just optimize the case of the topmost bitfield
3781 where we don't need to do any masking and also
3782 1 bit bitfields where xor can be used.
3783 We might win by one instruction for the other bitfields
3784 too if insv/extv instructions aren't used, so that
3785 can be added later. */
3786 if (bitpos + bitsize != str_bitsize
3787 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
3790 value = expand_expr (op1, NULL_RTX, str_mode, 0);
3791 value = convert_modes (str_mode,
3792 TYPE_MODE (TREE_TYPE (op1)), value,
3793 TYPE_UNSIGNED (TREE_TYPE (op1)));
3795 /* We may be accessing data outside the field, which means
3796 we can alias adjacent data. */
3797 if (MEM_P (str_rtx))
3799 str_rtx = shallow_copy_rtx (str_rtx);
3800 set_mem_alias_set (str_rtx, 0);
3801 set_mem_expr (str_rtx, 0);
3804 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
3805 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
3807 value = expand_and (str_mode, value, const1_rtx, NULL);
3810 value = expand_shift (LSHIFT_EXPR, str_mode, value,
3811 build_int_cst (NULL_TREE, bitpos),
3813 result = expand_binop (str_mode, binop, str_rtx,
3814 value, str_rtx, 1, OPTAB_WIDEN);
3815 if (result != str_rtx)
3816 emit_move_insn (str_rtx, result);
3821 if (TREE_CODE (op1) != INTEGER_CST)
3823 value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), 0);
3824 value = convert_modes (GET_MODE (str_rtx),
3825 TYPE_MODE (TREE_TYPE (op1)), value,
3826 TYPE_UNSIGNED (TREE_TYPE (op1)));
3828 /* We may be accessing data outside the field, which means
3829 we can alias adjacent data. */
3830 if (MEM_P (str_rtx))
3832 str_rtx = shallow_copy_rtx (str_rtx);
3833 set_mem_alias_set (str_rtx, 0);
3834 set_mem_expr (str_rtx, 0);
3837 binop = TREE_CODE (src) == BIT_IOR_EXPR ? ior_optab : xor_optab;
3838 if (bitpos + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
3840 rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize)
3842 value = expand_and (GET_MODE (str_rtx), value, mask,
3845 value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
3846 build_int_cst (NULL_TREE, bitpos),
3848 result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
3849 value, str_rtx, 1, OPTAB_WIDEN);
3850 if (result != str_rtx)
3851 emit_move_insn (str_rtx, result);
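/* Illustration (editor's sketch): the confine-and-shift trick used
   above for a bitfield ^= of a constant.  Only the field's bits may
   change, so the constant is masked to the field width and shifted to
   the field position before the xor.  */
#if 0
#include <stdio.h>

int
main (void)
{
  unsigned word = 0xffff0000u;  /* stand-in for STR_RTX's contents */
  unsigned bitpos = 4, bitsize = 3;
  unsigned value = 5;           /* the right-hand-side constant */

  unsigned mask = (1u << bitsize) - 1;
  value = (value & mask) << bitpos;  /* confine to the field */
  word ^= value;                     /* the BIT_XOR_EXPR case */

  printf ("0x%08x\n", word);  /* 0xffff0050 */
  return 0;
}
#endif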
3862 /* Expand an assignment that stores the value of FROM into TO. */
3865 expand_assignment (tree to, tree from)
3870 /* Don't crash if the lhs of the assignment was erroneous. */
3872 if (TREE_CODE (to) == ERROR_MARK)
3874 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3878 /* Assignment of a structure component needs special treatment
3879 if the structure component's rtx is not simply a MEM.
3880 Assignment of an array element at a constant index, and assignment of
3881 an array element in an unaligned packed structure field, has the same
3882 problem. */
3883 if (handled_component_p (to)
3884 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
3886 enum machine_mode mode1;
3887 HOST_WIDE_INT bitsize, bitpos;
3894 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3895 &unsignedp, &volatilep, true);
3897 /* If we are going to use store_bit_field and extract_bit_field,
3898 make sure to_rtx will be safe for multiple use. */
3900 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3904 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
3906 gcc_assert (MEM_P (to_rtx));
3908 #ifdef POINTERS_EXTEND_UNSIGNED
3909 if (GET_MODE (offset_rtx) != Pmode)
3910 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
3912 if (GET_MODE (offset_rtx) != ptr_mode)
3913 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3916 /* A constant address in TO_RTX can have VOIDmode; we must not
3917 call force_reg for that case, so avoid it. */
3919 && GET_MODE (to_rtx) == BLKmode
3920 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3922 && (bitpos % bitsize) == 0
3923 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3924 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3926 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3930 to_rtx = offset_address (to_rtx, offset_rtx,
3931 highest_pow2_factor_for_target (to,
3935 /* Handle expand_expr of a complex value returning a CONCAT. */
3936 if (GET_CODE (to_rtx) == CONCAT)
3938 if (TREE_CODE (TREE_TYPE (from)) == COMPLEX_TYPE)
3940 gcc_assert (bitpos == 0);
3941 result = store_expr (from, to_rtx, false);
3945 gcc_assert (bitpos == 0 || bitpos == GET_MODE_BITSIZE (mode1));
3946 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false);
3953 /* If the field is at offset zero, we could have been given the
3954 DECL_RTX of the parent struct. Don't munge it. */
3955 to_rtx = shallow_copy_rtx (to_rtx);
3957 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
3959 /* Deal with volatile and readonly fields. The former is only
3960 done for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3962 MEM_VOLATILE_P (to_rtx) = 1;
3963 if (component_uses_parent_alias_set (to))
3964 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3967 if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1,
3971 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3972 TREE_TYPE (tem), get_alias_set (to));
3976 preserve_temp_slots (result);
3982 /* If the rhs is a function call and its value is not an aggregate,
3983 call the function before we start to compute the lhs.
3984 This is needed for correct code for cases such as
3985 val = setjmp (buf) on machines where reference to val
3986 requires loading up part of an address in a separate insn.
3988 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3989 since it might be a promoted variable where the zero- or sign- extension
3990 needs to be done. Handling this in the normal way is safe because no
3991 computation is done before the call. */
3992 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
3993 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3994 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3995 && REG_P (DECL_RTL (to))))
4000 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
4002 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4004 /* Handle calls that return values in multiple non-contiguous locations.
4005 The Irix 6 ABI has examples of this. */
4006 if (GET_CODE (to_rtx) == PARALLEL)
4007 emit_group_load (to_rtx, value, TREE_TYPE (from),
4008 int_size_in_bytes (TREE_TYPE (from)));
4009 else if (GET_MODE (to_rtx) == BLKmode)
4010 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4013 if (POINTER_TYPE_P (TREE_TYPE (to)))
4014 value = convert_memory_address (GET_MODE (to_rtx), value);
4015 emit_move_insn (to_rtx, value);
4017 preserve_temp_slots (to_rtx);
4023 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4024 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4027 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4029 /* Don't move directly into a return register. */
4030 if (TREE_CODE (to) == RESULT_DECL
4031 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
4036 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
4038 if (GET_CODE (to_rtx) == PARALLEL)
4039 emit_group_load (to_rtx, temp, TREE_TYPE (from),
4040 int_size_in_bytes (TREE_TYPE (from)));
4042 emit_move_insn (to_rtx, temp);
4044 preserve_temp_slots (to_rtx);
4050 /* In case we are returning the contents of an object which overlaps
4051 the place the value is being stored, use a safe function when copying
4052 a value through a pointer into a structure value return block. */
4053 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4054 && current_function_returns_struct
4055 && !current_function_returns_pcc_struct)
4060 size = expr_size (from);
4061 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
4063 emit_library_call (memmove_libfunc, LCT_NORMAL,
4064 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4065 XEXP (from_rtx, 0), Pmode,
4066 convert_to_mode (TYPE_MODE (sizetype),
4067 size, TYPE_UNSIGNED (sizetype)),
4068 TYPE_MODE (sizetype));
4070 preserve_temp_slots (to_rtx);
4076 /* Compute FROM and store the value in the rtx we got. */
4079 result = store_expr (from, to_rtx, 0);
4080 preserve_temp_slots (result);
4086 /* Generate code for computing expression EXP,
4087 and storing the value into TARGET.
4089 If the mode is BLKmode then we may return TARGET itself.
4090 It turns out that in BLKmode it doesn't cause a problem,
4091 because C has no operators that could combine two different
4092 assignments into the same BLKmode object with different values
4093 with no sequence point. Will other languages need this to
4094 be more thorough?
4096 If CALL_PARAM_P is nonzero, this is a store into a call param on the
4097 stack, and block moves may need to be treated specially. */
4100 store_expr (tree exp, rtx target, int call_param_p)
4103 rtx alt_rtl = NULL_RTX;
4104 int dont_return_target = 0;
4106 if (VOID_TYPE_P (TREE_TYPE (exp)))
4108 /* C++ can generate ?: expressions with a throw expression in one
4109 branch and an rvalue in the other. Here, we resolve attempts to
4110 store the throw expression's nonexistent result. */
4111 gcc_assert (!call_param_p);
4112 expand_expr (exp, const0_rtx, VOIDmode, 0);
4115 if (TREE_CODE (exp) == COMPOUND_EXPR)
4117 /* Perform first part of compound expression, then assign from second
4119 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4120 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4121 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
4123 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4125 /* For conditional expression, get safe form of the target. Then
4126 test the condition, doing the appropriate assignment on either
4127 side. This avoids the creation of unnecessary temporaries.
4128 For non-BLKmode, it is more efficient not to do this. */
4130 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4132 do_pending_stack_adjust ();
4134 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4135 store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
4136 emit_jump_insn (gen_jump (lab2));
      emit_barrier ();
      emit_label (lab1);
      store_expr (TREE_OPERAND (exp, 2), target, call_param_p);
      emit_label (lab2);
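/* Sketch (illustrative) of the code emitted just above for
   "x = cond ? a : b" when X has BLKmode:

	  jumpifnot cond -> lab1
	  <store A into X>
	  jump lab2
	lab1:
	  <store B into X>
	lab2:

   Each arm stores directly into the target, avoiding a BLKmode
   temporary and a final copy.  */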
4145 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
    /* If this is a scalar in a register that is stored in a wider mode
       than the declared mode, compute the result into its declared mode
       and then convert to the wider mode.  Our value is the computed
       expression.  */
4151 rtx inner_target = 0;
4153 /* We can do the conversion inside EXP, which will often result
4154 in some optimizations. Do the conversion in two steps: first
4155 change the signedness, if needed, then the extend. But don't
4156 do this if the type of EXP is a subtype of something else
4157 since then the conversion might involve more than just
4158 converting modes. */
4159 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
4160 && TREE_TYPE (TREE_TYPE (exp)) == 0
4161 && (!lang_hooks.reduce_bit_field_operations
4162 || (GET_MODE_PRECISION (GET_MODE (target))
4163 == TYPE_PRECISION (TREE_TYPE (exp)))))
4165 if (TYPE_UNSIGNED (TREE_TYPE (exp))
4166 != SUBREG_PROMOTED_UNSIGNED_P (target))
	    exp = convert
	      (lang_hooks.types.signed_or_unsigned_type
	       (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);

	  exp = convert (lang_hooks.types.type_for_mode
			 (GET_MODE (SUBREG_REG (target)),
			  SUBREG_PROMOTED_UNSIGNED_P (target)),
			 exp);
4176 inner_target = SUBREG_REG (target);
4179 temp = expand_expr (exp, inner_target, VOIDmode,
4180 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4182 /* If TEMP is a VOIDmode constant, use convert_modes to make
4183 sure that we properly convert it. */
4184 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4186 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4187 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4188 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4189 GET_MODE (target), temp,
4190 SUBREG_PROMOTED_UNSIGNED_P (target));
4193 convert_move (SUBREG_REG (target), temp,
4194 SUBREG_PROMOTED_UNSIGNED_P (target));
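/* Illustrative example of the promoted-variable case: on a target
   that promotes subword modes, a local

	short s;

   may live in (subreg:HI (reg:SI N)) with SUBREG_PROMOTED_VAR_P set.
   An assignment to S is then computed in HImode and extended into the
   wider SImode register by the convert_move call above, using the
   signedness recorded in SUBREG_PROMOTED_UNSIGNED_P.  */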
    temp = expand_expr_real (exp, target, GET_MODE (target),
			     (call_param_p
			      ? EXPAND_STACK_PARM : EXPAND_NORMAL),
			     &alt_rtl);
4204 /* Return TARGET if it's a specified hardware register.
4205 If TARGET is a volatile mem ref, either return TARGET
4206 or return a reg copied *from* TARGET; ANSI requires this.
4208 Otherwise, if TEMP is not TARGET, return TEMP
4209 if it is constant (for efficiency),
4210 or if we really want the correct value. */
4211 if (!(target && REG_P (target)
4212 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4213 && !(MEM_P (target) && MEM_VOLATILE_P (target))
4214 && ! rtx_equal_p (temp, target)
4215 && CONSTANT_P (temp))
4216 dont_return_target = 1;
4219 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4220 the same as that of TARGET, adjust the constant. This is needed, for
4221 example, in case it is a CONST_DOUBLE and we want only a word-sized
4223 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4224 && TREE_CODE (exp) != ERROR_MARK
4225 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4226 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4227 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
4229 /* If value was not generated in the target, store it there.
4230 Convert the value to TARGET's type first if necessary and emit the
4231 pending incrementations that have been queued when expanding EXP.
4232 Note that we cannot emit the whole queue blindly because this will
4233 effectively disable the POST_INC optimization later.
4235 If TEMP and TARGET compare equal according to rtx_equal_p, but
4236 one or both of them are volatile memory refs, we have to distinguish
     - expand_expr has used TARGET.  In this case, we must not generate
       another copy.  This can be detected by TARGET being equal according
       to == .
4241 - expand_expr has not used TARGET - that means that the source just
4242 happens to have the same RTX form. Since temp will have been created
4243 by expand_expr, it will compare unequal according to == .
4244 We must generate a copy in this case, to reach the correct number
4245 of volatile memory references. */
4247 if ((! rtx_equal_p (temp, target)
4248 || (temp != target && (side_effects_p (temp)
4249 || side_effects_p (target))))
4250 && TREE_CODE (exp) != ERROR_MARK
      /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
	 but TARGET is not a valid memory reference, TEMP will differ
	 from TARGET although it is really the same location.  */
4254 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
      /* If there's nothing to copy, don't bother.  Don't call
	 expr_size unless necessary, because the expr_size hook of some
	 front ends (C++) must not be given objects that are not
	 supposed to be bit-copied or bit-initialized.  */
4259 && expr_size (exp) != const0_rtx)
4261 if (GET_MODE (temp) != GET_MODE (target)
4262 && GET_MODE (temp) != VOIDmode)
4264 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4265 if (dont_return_target)
4267 /* In this case, we will return TEMP,
4268 so make sure it has the proper mode.
4269 But don't forget to store the value into TARGET. */
4270 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4271 emit_move_insn (target, temp);
4274 convert_move (target, temp, unsignedp);
4277 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4279 /* Handle copying a string constant into an array. The string
4280 constant may be shorter than the array. So copy just the string's
4281 actual length, and clear the rest. First get the size of the data
4282 type of the string, which is actually the size of the target. */
4283 rtx size = expr_size (exp);
4285 if (GET_CODE (size) == CONST_INT
4286 && INTVAL (size) < TREE_STRING_LENGTH (exp))
	emit_block_move (target, temp, size,
			 (call_param_p
			  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
      else
	{
	  /* Compute the size of the data to copy from the string.  */
	  tree copy_size
	    = size_binop (MIN_EXPR,
			  make_tree (sizetype, size),
			  size_int (TREE_STRING_LENGTH (exp)));
	  rtx copy_size_rtx
	    = expand_expr (copy_size, NULL_RTX, VOIDmode,
			   (call_param_p
			    ? EXPAND_STACK_PARM : EXPAND_NORMAL));
	  rtx label = 0;
4303 /* Copy that much. */
4304 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4305 TYPE_UNSIGNED (sizetype));
	  emit_block_move (target, temp, copy_size_rtx,
			   (call_param_p
			    ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4310 /* Figure out how much is left in TARGET that we have to clear.
4311 Do all calculations in ptr_mode. */
4312 if (GET_CODE (copy_size_rtx) == CONST_INT)
4314 size = plus_constant (size, -INTVAL (copy_size_rtx));
4315 target = adjust_address (target, BLKmode,
4316 INTVAL (copy_size_rtx));
	      size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
				   copy_size_rtx, NULL_RTX, 0,
				   OPTAB_LIB_WIDEN);
4324 #ifdef POINTERS_EXTEND_UNSIGNED
	      if (GET_MODE (copy_size_rtx) != Pmode)
		copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
						 TYPE_UNSIGNED (sizetype));
#endif
4330 target = offset_address (target, copy_size_rtx,
4331 highest_pow2_factor (copy_size));
4332 label = gen_label_rtx ();
4333 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4334 GET_MODE (size), 0, label);
4337 if (size != const0_rtx)
4338 clear_storage (target, size, BLOCK_OP_NORMAL);
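/* Worked example (illustrative) for the STRING_CST path: for

	char buf[10] = "hi";

   expr_size is 10 while TREE_STRING_LENGTH is 3 (it includes the
   terminating NUL), so three bytes are block-copied and the remaining
   seven are cleared by the clear_storage call above.  */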
4344 /* Handle calls that return values in multiple non-contiguous locations.
4345 The Irix 6 ABI has examples of this. */
4346 else if (GET_CODE (target) == PARALLEL)
4347 emit_group_load (target, temp, TREE_TYPE (exp),
4348 int_size_in_bytes (TREE_TYPE (exp)));
4349 else if (GET_MODE (temp) == BLKmode)
	emit_block_move (target, temp, expr_size (exp),
			 (call_param_p
			  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4355 temp = force_operand (temp, target);
4357 emit_move_insn (target, temp);
4364 /* Examine CTOR to discover:
4365 * how many scalar fields are set to nonzero values,
4366 and place it in *P_NZ_ELTS;
4367 * how many scalar fields are set to non-constant values,
4368 and place it in *P_NC_ELTS; and
4369 * how many scalar fields in total are in CTOR,
4370 and place it in *P_ELT_COUNT.
4371 * if a type is a union, and the initializer from the constructor
4372 is not the largest element in the union, then set *p_must_clear. */
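/* Illustrative example (not part of the original comment): given

	int x;
	struct { int a, b, c; } s = { 1, 0, x };

   *P_NZ_ELTS would be 2 (the 1, plus X which must be assumed nonzero),
   *P_NC_ELTS would be 1 (X is not a constant), and *P_ELT_COUNT would
   be 3.  */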
4375 categorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts,
4376 HOST_WIDE_INT *p_nc_elts,
4377 HOST_WIDE_INT *p_elt_count,
4380 HOST_WIDE_INT nz_elts, nc_elts, elt_count;
4387 for (list = CONSTRUCTOR_ELTS (ctor); list; list = TREE_CHAIN (list))
4389 tree value = TREE_VALUE (list);
4390 tree purpose = TREE_PURPOSE (list);
4394 if (TREE_CODE (purpose) == RANGE_EXPR)
4396 tree lo_index = TREE_OPERAND (purpose, 0);
4397 tree hi_index = TREE_OPERAND (purpose, 1);
4399 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4400 mult = (tree_low_cst (hi_index, 1)
4401 - tree_low_cst (lo_index, 1) + 1);
4404 switch (TREE_CODE (value))
4408 HOST_WIDE_INT nz = 0, nc = 0, ic = 0;
4409 categorize_ctor_elements_1 (value, &nz, &nc, &ic, p_must_clear);
4410 nz_elts += mult * nz;
4411 nc_elts += mult * nc;
4412 elt_count += mult * ic;
4418 if (!initializer_zerop (value))
4424 nz_elts += mult * TREE_STRING_LENGTH (value);
4425 elt_count += mult * TREE_STRING_LENGTH (value);
4429 if (!initializer_zerop (TREE_REALPART (value)))
4431 if (!initializer_zerop (TREE_IMAGPART (value)))
4439 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4441 if (!initializer_zerop (TREE_VALUE (v)))
4451 if (!initializer_constant_valid_p (value, TREE_TYPE (value)))
4458 && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
4459 || TREE_CODE (TREE_TYPE (ctor)) == QUAL_UNION_TYPE))
4462 bool clear_this = true;
4464 list = CONSTRUCTOR_ELTS (ctor);
4467 /* We don't expect more than one element of the union to be
4468 initialized. Not sure what we should do otherwise... */
4469 gcc_assert (TREE_CHAIN (list) == NULL);
4471 init_sub_type = TREE_TYPE (TREE_VALUE (list));
4473 /* ??? We could look at each element of the union, and find the
4474 largest element. Which would avoid comparing the size of the
4475 initialized element against any tail padding in the union.
4476 Doesn't seem worth the effort... */
4477 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (ctor)),
4478 TYPE_SIZE (init_sub_type)) == 1)
4480 /* And now we have to find out if the element itself is fully
4481 constructed. E.g. for union { struct { int a, b; } s; } u
4482 = { .s = { .a = 1 } }. */
4483 if (elt_count == count_type_elements (init_sub_type))
4488 *p_must_clear = clear_this;
4491 *p_nz_elts += nz_elts;
4492 *p_nc_elts += nc_elts;
4493 *p_elt_count += elt_count;
4497 categorize_ctor_elements (tree ctor, HOST_WIDE_INT *p_nz_elts,
4498 HOST_WIDE_INT *p_nc_elts,
4499 HOST_WIDE_INT *p_elt_count,
4505 *p_must_clear = false;
4506 categorize_ctor_elements_1 (ctor, p_nz_elts, p_nc_elts, p_elt_count,
/* Count the number of scalars in TYPE.  Return -1 on overflow or
   variable-sized.  */
4514 count_type_elements (tree type)
4516 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
4517 switch (TREE_CODE (type))
4521 tree telts = array_type_nelts (type);
4522 if (telts && host_integerp (telts, 1))
4524 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
4525 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type));
4528 else if (max / n > m)
4536 HOST_WIDE_INT n = 0, t;
4539 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4540 if (TREE_CODE (f) == FIELD_DECL)
4542 t = count_type_elements (TREE_TYPE (f));
4552 case QUAL_UNION_TYPE:
4554 /* Ho hum. How in the world do we guess here? Clearly it isn't
4555 right to count the fields. Guess based on the number of words. */
4556 HOST_WIDE_INT n = int_size_in_bytes (type);
4559 return n / UNITS_PER_WORD;
4566 return TYPE_VECTOR_SUBPARTS (type);
4575 case REFERENCE_TYPE:
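/* Illustrative examples, assuming a C front end: "int a[4][2]" counts
   8 scalars; a struct containing an int and a double counts 2; for
   unions the size in words is used as a rough guess, as the comment
   above explains.  */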
/* Return 1 if EXP contains mostly (at least 3/4) zeros.  */
4590 mostly_zeros_p (tree exp)
4592 if (TREE_CODE (exp) == CONSTRUCTOR)
4595 HOST_WIDE_INT nz_elts, nc_elts, count, elts;
4598 categorize_ctor_elements (exp, &nz_elts, &nc_elts, &count, &must_clear);
4602 elts = count_type_elements (TREE_TYPE (exp));
4604 return nz_elts < elts / 4;
4607 return initializer_zerop (exp);
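/* Example (illustrative): for "int a[100] = { 1 };" the constructor
   has one nonzero scalar while the type has 100, and 1 < 100 / 4, so
   mostly_zeros_p returns 1; store_constructor then clears the whole
   array first and stores only the nonzero element.  */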
4610 /* Helper function for store_constructor.
4611 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4612 TYPE is the type of the CONSTRUCTOR, not the element type.
4613 CLEARED is as for store_constructor.
4614 ALIAS_SET is the alias set to use for any stores.
4616 This provides a recursive shortcut back to store_constructor when it isn't
4617 necessary to go through store_field. This is so that we can pass through
4618 the cleared field to let store_constructor know that we may not have to
4619 clear a substructure if the outer structure has already been cleared. */
4622 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4623 HOST_WIDE_INT bitpos, enum machine_mode mode,
4624 tree exp, tree type, int cleared, int alias_set)
4626 if (TREE_CODE (exp) == CONSTRUCTOR
4627 /* We can only call store_constructor recursively if the size and
4628 bit position are on a byte boundary. */
4629 && bitpos % BITS_PER_UNIT == 0
4630 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
4631 /* If we have a nonzero bitpos for a register target, then we just
4632 let store_field do the bitfield handling. This is unlikely to
	 generate unnecessary clear instructions anyway.  */
4634 && (bitpos == 0 || MEM_P (target)))
    {
      if (MEM_P (target))
	target
	  = adjust_address (target,
			    GET_MODE (target) == BLKmode
			    || 0 != (bitpos
				     % GET_MODE_ALIGNMENT (GET_MODE (target)))
			    ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4645 /* Update the alias set, if required. */
4646 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
4647 && MEM_ALIAS_SET (target) != 0)
4649 target = copy_rtx (target);
4650 set_mem_alias_set (target, alias_set);
4653 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4656 store_field (target, bitsize, bitpos, mode, exp, type, alias_set);
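/* Illustrative example of the shortcut: for

	struct in { int x, y; };
	struct out { struct in i; } o = { { 1, 2 } };

   the inner CONSTRUCTOR begins on a byte boundary, so it is passed
   back to store_constructor with the CLEARED flag intact rather than
   being funneled through store_field.  */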
4659 /* Store the value of constructor EXP into the rtx TARGET.
4660 TARGET is either a REG or a MEM; we know it cannot conflict, since
4661 safe_from_p has been called.
4662 CLEARED is true if TARGET is known to have been zero'd.
4663 SIZE is the number of bytes of TARGET we are allowed to modify: this
4664 may not be the same as the size of EXP if we are assigning to a field
4665 which has been packed to exclude padding bits. */
4668 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4670 tree type = TREE_TYPE (exp);
4671 #ifdef WORD_REGISTER_OPERATIONS
4672 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4675 switch (TREE_CODE (type))
4679 case QUAL_UNION_TYPE:
4683 /* If size is zero or the target is already cleared, do nothing. */
4684 if (size == 0 || cleared)
4686 /* We either clear the aggregate or indicate the value is dead. */
4687 else if ((TREE_CODE (type) == UNION_TYPE
4688 || TREE_CODE (type) == QUAL_UNION_TYPE)
4689 && ! CONSTRUCTOR_ELTS (exp))
4690 /* If the constructor is empty, clear the union. */
4692 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
4696 /* If we are building a static constructor into a register,
4697 set the initial value as zero so we can fold the value into
4698 a constant. But if more than one register is involved,
4699 this probably loses. */
4700 else if (REG_P (target) && TREE_STATIC (exp)
4701 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4703 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4707 /* If the constructor has fewer fields than the structure or
4708 if we are initializing the structure to mostly zeros, clear
4709 the whole structure first. Don't do this if TARGET is a
4710 register whose mode size isn't equal to SIZE since
4711 clear_storage can't handle this case. */
4713 && ((list_length (CONSTRUCTOR_ELTS (exp))
4714 != fields_length (type))
4715 || mostly_zeros_p (exp))
	       && (!REG_P (target)
		   || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
		       == size)))
	{
	  clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
4725 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4727 /* Store each element of the constructor into the
4728 corresponding field of TARGET. */
4730 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4732 tree field = TREE_PURPOSE (elt);
4733 tree value = TREE_VALUE (elt);
4734 enum machine_mode mode;
4735 HOST_WIDE_INT bitsize;
4736 HOST_WIDE_INT bitpos = 0;
4738 rtx to_rtx = target;
4740 /* Just ignore missing fields. We cleared the whole
4741 structure, above, if any fields are missing. */
	  if (cleared && initializer_zerop (value))
	    continue;

	  if (host_integerp (DECL_SIZE (field), 1))
	    bitsize = tree_low_cst (DECL_SIZE (field), 1);
	  else
	    bitsize = -1;
4753 mode = DECL_MODE (field);
4754 if (DECL_BIT_FIELD (field))
4757 offset = DECL_FIELD_OFFSET (field);
4758 if (host_integerp (offset, 0)
4759 && host_integerp (bit_position (field), 0))
4761 bitpos = int_bit_position (field);
4765 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4772 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
4773 make_tree (TREE_TYPE (exp),
4776 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4777 gcc_assert (MEM_P (to_rtx));
4779 #ifdef POINTERS_EXTEND_UNSIGNED
4780 if (GET_MODE (offset_rtx) != Pmode)
4781 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4783 if (GET_MODE (offset_rtx) != ptr_mode)
4784 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4787 to_rtx = offset_address (to_rtx, offset_rtx,
4788 highest_pow2_factor (offset));
4791 #ifdef WORD_REGISTER_OPERATIONS
4792 /* If this initializes a field that is smaller than a
4793 word, at the start of a word, try to widen it to a full
4794 word. This special case allows us to output C++ member
	     function initializations in a form that the optimizers
	     can understand.  */
	  if (REG_P (target)
	      && bitsize < BITS_PER_WORD
	      && bitpos % BITS_PER_WORD == 0
	      && GET_MODE_CLASS (mode) == MODE_INT
	      && TREE_CODE (value) == INTEGER_CST
	      && exp_size >= 0
	      && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4805 tree type = TREE_TYPE (value);
4807 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4809 type = lang_hooks.types.type_for_size
4810 (BITS_PER_WORD, TYPE_UNSIGNED (type));
4811 value = convert (type, value);
	      if (BYTES_BIG_ENDIAN)
		value
		  = fold_build2 (LSHIFT_EXPR, type, value,
4817 build_int_cst (NULL_TREE,
4818 BITS_PER_WORD - bitsize));
4819 bitsize = BITS_PER_WORD;
4824 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4825 && DECL_NONADDRESSABLE_P (field))
4827 to_rtx = copy_rtx (to_rtx);
4828 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4831 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4832 value, type, cleared,
4833 get_alias_set (TREE_TYPE (field)));
4843 tree elttype = TREE_TYPE (type);
4845 HOST_WIDE_INT minelt = 0;
4846 HOST_WIDE_INT maxelt = 0;
4848 domain = TYPE_DOMAIN (type);
4849 const_bounds_p = (TYPE_MIN_VALUE (domain)
4850 && TYPE_MAX_VALUE (domain)
4851 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4852 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4854 /* If we have constant bounds for the range of the type, get them. */
4857 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4858 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4861 /* If the constructor has fewer elements than the array, clear
	   the whole array first.  Similarly if this is a static
	   constructor of a non-BLKmode object.  */
	else if (REG_P (target) && TREE_STATIC (exp))
	  need_to_clear = 1;
	else
	  {
4870 HOST_WIDE_INT count = 0, zero_count = 0;
4871 need_to_clear = ! const_bounds_p;
4873 /* This loop is a more accurate version of the loop in
4874 mostly_zeros_p (it handles RANGE_EXPR in an index). It
4875 is also needed to check for missing elements. */
4876 for (elt = CONSTRUCTOR_ELTS (exp);
4877 elt != NULL_TREE && ! need_to_clear;
4878 elt = TREE_CHAIN (elt))
4880 tree index = TREE_PURPOSE (elt);
4881 HOST_WIDE_INT this_node_count;
4883 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4885 tree lo_index = TREE_OPERAND (index, 0);
4886 tree hi_index = TREE_OPERAND (index, 1);
4888 if (! host_integerp (lo_index, 1)
		    || ! host_integerp (hi_index, 1))
		  {
		    need_to_clear = 1;
		    break;
		  }
4895 this_node_count = (tree_low_cst (hi_index, 1)
4896 - tree_low_cst (lo_index, 1) + 1);
4899 this_node_count = 1;
4901 count += this_node_count;
4902 if (mostly_zeros_p (TREE_VALUE (elt)))
4903 zero_count += this_node_count;
4906 /* Clear the entire array first if there are any missing
4907 elements, or if the incidence of zero elements is >=
4910 && (count < maxelt - minelt + 1
4911 || 4 * zero_count >= 3 * count))
4915 if (need_to_clear && size > 0)
4918 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4920 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
4924 if (!cleared && REG_P (target))
4925 /* Inform later passes that the old value is dead. */
4926 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4928 /* Store each element of the constructor into the
4929 corresponding element of TARGET, determined by counting the
4931 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4933 elt = TREE_CHAIN (elt), i++)
4935 enum machine_mode mode;
4936 HOST_WIDE_INT bitsize;
4937 HOST_WIDE_INT bitpos;
4939 tree value = TREE_VALUE (elt);
4940 tree index = TREE_PURPOSE (elt);
4941 rtx xtarget = target;
	    if (cleared && initializer_zerop (value))
	      continue;
4946 unsignedp = TYPE_UNSIGNED (elttype);
4947 mode = TYPE_MODE (elttype);
4948 if (mode == BLKmode)
4949 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4950 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4953 bitsize = GET_MODE_BITSIZE (mode);
4955 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4957 tree lo_index = TREE_OPERAND (index, 0);
4958 tree hi_index = TREE_OPERAND (index, 1);
4959 rtx index_r, pos_rtx;
4960 HOST_WIDE_INT lo, hi, count;
4963 /* If the range is constant and "small", unroll the loop. */
4965 && host_integerp (lo_index, 0)
4966 && host_integerp (hi_index, 0)
4967 && (lo = tree_low_cst (lo_index, 0),
4968 hi = tree_low_cst (hi_index, 0),
4969 count = hi - lo + 1,
4972 || (host_integerp (TYPE_SIZE (elttype), 1)
4973 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4976 lo -= minelt; hi -= minelt;
4977 for (; lo <= hi; lo++)
4979 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4982 && !MEM_KEEP_ALIAS_SET_P (target)
4983 && TREE_CODE (type) == ARRAY_TYPE
4984 && TYPE_NONALIASED_COMPONENT (type))
4986 target = copy_rtx (target);
4987 MEM_KEEP_ALIAS_SET_P (target) = 1;
4990 store_constructor_field
4991 (target, bitsize, bitpos, mode, value, type, cleared,
4992 get_alias_set (elttype));
4997 rtx loop_start = gen_label_rtx ();
4998 rtx loop_end = gen_label_rtx ();
5001 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
5002 unsignedp = TYPE_UNSIGNED (domain);
5004 index = build_decl (VAR_DECL, NULL_TREE, domain);
5007 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5009 SET_DECL_RTL (index, index_r);
5010 store_expr (lo_index, index_r, 0);
5012 /* Build the head of the loop. */
5013 do_pending_stack_adjust ();
5014 emit_label (loop_start);
5016 /* Assign value to element index. */
5018 = convert (ssizetype,
5019 fold_build2 (MINUS_EXPR, TREE_TYPE (index),
5020 index, TYPE_MIN_VALUE (domain)));
5021 position = size_binop (MULT_EXPR, position,
5023 TYPE_SIZE_UNIT (elttype)));
5025 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
5026 xtarget = offset_address (target, pos_rtx,
5027 highest_pow2_factor (position));
5028 xtarget = adjust_address (xtarget, mode, 0);
5029 if (TREE_CODE (value) == CONSTRUCTOR)
5030 store_constructor (value, xtarget, cleared,
5031 bitsize / BITS_PER_UNIT);
5033 store_expr (value, xtarget, 0);
5035 /* Generate a conditional jump to exit the loop. */
		exit_cond = build2 (LT_EXPR, integer_type_node,
				    index, hi_index);
5038 jumpif (exit_cond, loop_end);
5040 /* Update the loop counter, and jump to the head of
5042 expand_assignment (index,
5043 build2 (PLUS_EXPR, TREE_TYPE (index),
5044 index, integer_one_node));
5046 emit_jump (loop_start);
5048 /* Build the end of the loop. */
5049 emit_label (loop_end);
5052 else if ((index != 0 && ! host_integerp (index, 0))
5053 || ! host_integerp (TYPE_SIZE (elttype), 1))
5058 index = ssize_int (1);
5061 index = fold_convert (ssizetype,
5062 fold_build2 (MINUS_EXPR,
5065 TYPE_MIN_VALUE (domain)));
5067 position = size_binop (MULT_EXPR, index,
5069 TYPE_SIZE_UNIT (elttype)));
5070 xtarget = offset_address (target,
5071 expand_expr (position, 0, VOIDmode, 0),
5072 highest_pow2_factor (position));
5073 xtarget = adjust_address (xtarget, mode, 0);
5074 store_expr (value, xtarget, 0);
5079 bitpos = ((tree_low_cst (index, 0) - minelt)
5080 * tree_low_cst (TYPE_SIZE (elttype), 1));
5082 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5084 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
5085 && TREE_CODE (type) == ARRAY_TYPE
5086 && TYPE_NONALIASED_COMPONENT (type))
5088 target = copy_rtx (target);
5089 MEM_KEEP_ALIAS_SET_P (target) = 1;
5091 store_constructor_field (target, bitsize, bitpos, mode, value,
5092 type, cleared, get_alias_set (elttype));
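/* Illustrative note on the RANGE_EXPR handling above: for a GNU C
   range initializer such as

	int a[100] = { [0 ... 99] = v };

   a small constant range is unrolled into individual stores, while a
   large or non-constant one is compiled into the runtime loop built
   above (index register, store, increment, jump back).  */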
5104 tree elttype = TREE_TYPE (type);
5105 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
5106 enum machine_mode eltmode = TYPE_MODE (elttype);
5107 HOST_WIDE_INT bitsize;
5108 HOST_WIDE_INT bitpos;
5109 rtvec vector = NULL;
5112 gcc_assert (eltmode != BLKmode);
5114 n_elts = TYPE_VECTOR_SUBPARTS (type);
5115 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
5117 enum machine_mode mode = GET_MODE (target);
5119 icode = (int) vec_init_optab->handlers[mode].insn_code;
5120 if (icode != CODE_FOR_nothing)
5124 vector = rtvec_alloc (n_elts);
5125 for (i = 0; i < n_elts; i++)
5126 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
5130 /* If the constructor has fewer elements than the vector,
	   clear the whole array first.  Similarly if this is a static
	   constructor of a non-BLKmode object.  */
	else if (REG_P (target) && TREE_STATIC (exp))
	  need_to_clear = 1;
	else
	  {
5139 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
5141 for (elt = CONSTRUCTOR_ELTS (exp);
5143 elt = TREE_CHAIN (elt))
5145 int n_elts_here = tree_low_cst
5146 (int_const_binop (TRUNC_DIV_EXPR,
5147 TYPE_SIZE (TREE_TYPE (TREE_VALUE (elt))),
5148 TYPE_SIZE (elttype), 0), 1);
5150 count += n_elts_here;
5151 if (mostly_zeros_p (TREE_VALUE (elt)))
5152 zero_count += n_elts_here;
5155 /* Clear the entire vector first if there are any missing elements,
5156 or if the incidence of zero elements is >= 75%. */
5157 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
5160 if (need_to_clear && size > 0 && !vector)
5163 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5165 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5169 /* Inform later passes that the old value is dead. */
5170 if (!cleared && REG_P (target))
5171 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5173 /* Store each element of the constructor into the corresponding
5174 element of TARGET, determined by counting the elements. */
5175 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
5177 elt = TREE_CHAIN (elt), i += bitsize / elt_size)
5179 tree value = TREE_VALUE (elt);
5180 tree index = TREE_PURPOSE (elt);
5181 HOST_WIDE_INT eltpos;
5183 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
	    if (cleared && initializer_zerop (value))
	      continue;

	    if (index != 0)
	      eltpos = tree_low_cst (index, 1);
	    else
	      eltpos = i;
5194 /* Vector CONSTRUCTORs should only be built from smaller
5195 vectors in the case of BLKmode vectors. */
5196 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
5197 RTVEC_ELT (vector, eltpos)
5198 = expand_expr (value, NULL_RTX, VOIDmode, 0);
5202 enum machine_mode value_mode =
5203 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
5204 ? TYPE_MODE (TREE_TYPE (value))
5206 bitpos = eltpos * elt_size;
5207 store_constructor_field (target, bitsize, bitpos,
5208 value_mode, value, type,
5209 cleared, get_alias_set (elttype));
	  emit_insn (GEN_FCN (icode)
		     (target,
		      gen_rtx_PARALLEL (GET_MODE (target), vector)));
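/* Illustrative example for the vector path: with GCC's vector
   extension,

	typedef int v4si __attribute__ ((vector_size (16)));
	v4si v = { a, b, c, d };

   the four element rtxes are collected in VECTOR and emitted by a
   single vec_init insn taking a PARALLEL, provided the target defines
   that pattern; otherwise each element is stored through
   store_constructor_field.  */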
5225 /* Store the value of EXP (an expression tree)
5226 into a subfield of TARGET which has mode MODE and occupies
5227 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5228 If MODE is VOIDmode, it means that we are storing into a bit-field.
5230 Always return const0_rtx unless we have something particular to
5233 TYPE is the type of the underlying object,
5235 ALIAS_SET is the alias set for the destination. This value will
5236 (in general) be different from that for TARGET, since TARGET is a
5237 reference to the containing structure. */
5240 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5241 enum machine_mode mode, tree exp, tree type, int alias_set)
5243 HOST_WIDE_INT width_mask = 0;
5245 if (TREE_CODE (exp) == ERROR_MARK)
5248 /* If we have nothing to store, do nothing unless the expression has
5251 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5252 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5253 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5255 /* If we are storing into an unaligned field of an aligned union that is
5256 in a register, we may have the mode of TARGET being an integer mode but
5257 MODE == BLKmode. In that case, get an aligned object whose size and
5258 alignment are the same as TARGET and store TARGET into it (we can avoid
5259 the store if the field being stored is the entire width of TARGET). Then
5260 call ourselves recursively to store the field into a BLKmode version of
5261 that object. Finally, load from the object into TARGET. This is not
5262 very efficient in general, but should only be slightly more expensive
5263 than the otherwise-required unaligned accesses. Perhaps this can be
5264 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5265 twice, once with emit_move_insn and once via store_field. */
5268 && (REG_P (target) || GET_CODE (target) == SUBREG))
5270 rtx object = assign_temp (type, 0, 1, 1);
5271 rtx blk_object = adjust_address (object, BLKmode, 0);
5273 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5274 emit_move_insn (object, target);
5276 store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set);
5278 emit_move_insn (target, object);
      /* We want to return the BLKmode version of the data.  */
      return blk_object;
5284 if (GET_CODE (target) == CONCAT)
5286 /* We're storing into a struct containing a single __complex. */
5288 gcc_assert (!bitpos);
5289 return store_expr (exp, target, 0);
5292 /* If the structure is in a register or if the component
5293 is a bit field, we cannot use addressing to access it.
5294 Use bit-field techniques or SUBREG to store in it. */
5296 if (mode == VOIDmode
5297 || (mode != BLKmode && ! direct_store[(int) mode]
5298 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5299 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5301 || GET_CODE (target) == SUBREG
5302 /* If the field isn't aligned enough to store as an ordinary memref,
5303 store it as a bit field. */
      || (MEM_P (target)
	  && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5306 || bitpos % GET_MODE_ALIGNMENT (mode))
5307 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5308 || (bitpos % BITS_PER_UNIT != 0)))
5309 /* If the RHS and field are a constant size and the size of the
	 RHS isn't the same size as the bitfield, we must use bitfield
	 operations.  */
      || (bitsize >= 0
5313 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5314 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5318 /* If EXP is a NOP_EXPR of precision less than its mode, then that
5319 implies a mask operation. If the precision is the same size as
5320 the field we're storing into, that mask is redundant. This is
	 particularly common with bit field assignments generated by the
	 C++ front end.  */
5323 if (TREE_CODE (exp) == NOP_EXPR)
5325 tree type = TREE_TYPE (exp);
5326 if (INTEGRAL_TYPE_P (type)
5327 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
5328 && bitsize == TYPE_PRECISION (type))
5330 type = TREE_TYPE (TREE_OPERAND (exp, 0));
5331 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
5332 exp = TREE_OPERAND (exp, 0);
5336 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5338 /* If BITSIZE is narrower than the size of the type of EXP
5339 we will be narrowing TEMP. Normally, what's wanted are the
	 low-order bits.  However, if EXP's type is a record and this is
	 a big-endian machine, we want the upper BITSIZE bits.  */
5342 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5343 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5344 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5345 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5346 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5350 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5352 if (mode != VOIDmode && mode != BLKmode
5353 && mode != TYPE_MODE (TREE_TYPE (exp)))
5354 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5356 /* If the modes of TARGET and TEMP are both BLKmode, both
5357 must be in memory and BITPOS must be aligned on a byte
5358 boundary. If so, we simply do a block copy. */
5359 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5361 gcc_assert (MEM_P (target) && MEM_P (temp)
5362 && !(bitpos % BITS_PER_UNIT));
5364 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5365 emit_block_move (target, temp,
			   GEN_INT ((bitsize + BITS_PER_UNIT - 1)
				    / BITS_PER_UNIT),
			   BLOCK_OP_NORMAL);
5373 /* Store the value in the bitfield. */
5374 store_bit_field (target, bitsize, bitpos, mode, temp);
5380 /* Now build a reference to just the desired component. */
5381 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5383 if (to_rtx == target)
5384 to_rtx = copy_rtx (to_rtx);
5386 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5387 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5388 set_mem_alias_set (to_rtx, alias_set);
5390 return store_expr (exp, to_rtx, 0);
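/* Illustrative example for store_field: for

	struct { unsigned f : 5; } s;
	s.f = v;

   the member is a bit-field, so MODE is VOIDmode and the store goes
   through store_bit_field above; a naturally aligned int member would
   instead take the adjust_address/store_expr path.  */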
5394 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5395 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5396 codes and find the ultimate containing object, which we return.
5398 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5399 bit position, and *PUNSIGNEDP to the signedness of the field.
5400 If the position of the field is variable, we store a tree
5401 giving the variable offset (in units) in *POFFSET.
5402 This offset is in addition to the bit position.
5403 If the position is not variable, we store 0 in *POFFSET.
5405 If any of the extraction expressions is volatile,
5406 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5408 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
   is a mode that can be used to access the field.  In that case, *PBITSIZE
   is redundant.
5412 If the field describes a variable-sized object, *PMODE is set to
5413 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5414 this case, but the address of the object can be found.
5416 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
5417 look through nodes that serve as markers of a greater alignment than
5418 the one that can be deduced from the expression. These nodes make it
5419 possible for front-ends to prevent temporaries from being created by
5420 the middle-end on alignment considerations. For that purpose, the
5421 normal operating mode at high-level is to always pass FALSE so that
5422 the ultimate containing object is really returned; moreover, the
5423 associated predicate handled_component_p will always return TRUE
5424 on these nodes, thus indicating that they are essentially handled
5425 by get_inner_reference. TRUE should only be passed when the caller
5426 is scanning the expression in order to build another representation
5427 and specifically knows how to handle these nodes; as such, this is
5428 the normal operating mode in the RTL expanders. */
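/* Illustrative example, assuming a C front end: for "s.a[i].b",
   get_inner_reference returns the VAR_DECL for S, sets *PBITPOS to the
   constant part of the displacement of B, sets *POFFSET to a sizetype
   tree giving I times the element size for the variable part, and
   describes the field B itself through *PMODE and *PBITSIZE.  */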
5431 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5432 HOST_WIDE_INT *pbitpos, tree *poffset,
5433 enum machine_mode *pmode, int *punsignedp,
5434 int *pvolatilep, bool keep_aligning)
5437 enum machine_mode mode = VOIDmode;
5438 tree offset = size_zero_node;
5439 tree bit_offset = bitsize_zero_node;
5442 /* First get the mode, signedness, and size. We do this from just the
5443 outermost expression. */
5444 if (TREE_CODE (exp) == COMPONENT_REF)
5446 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5447 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5448 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5450 *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
5452 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5454 size_tree = TREE_OPERAND (exp, 1);
5455 *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
5459 mode = TYPE_MODE (TREE_TYPE (exp));
5460 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5462 if (mode == BLKmode)
5463 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5465 *pbitsize = GET_MODE_BITSIZE (mode);
5470 if (! host_integerp (size_tree, 1))
	mode = BLKmode, *pbitsize = -1;
      else
	*pbitsize = tree_low_cst (size_tree, 1);
5476 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5477 and find the ultimate containing object. */
5480 switch (TREE_CODE (exp))
5483 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5484 TREE_OPERAND (exp, 2));
5489 tree field = TREE_OPERAND (exp, 1);
5490 tree this_offset = component_ref_field_offset (exp);
5492 /* If this field hasn't been filled in yet, don't go past it.
5493 This should only happen when folding expressions made during
5494 type construction. */
5495 if (this_offset == 0)
5498 offset = size_binop (PLUS_EXPR, offset, this_offset);
5499 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5500 DECL_FIELD_BIT_OFFSET (field));
5502 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5507 case ARRAY_RANGE_REF:
5509 tree index = TREE_OPERAND (exp, 1);
5510 tree low_bound = array_ref_low_bound (exp);
5511 tree unit_size = array_ref_element_size (exp);
5513 /* We assume all arrays have sizes that are a multiple of a byte.
5514 First subtract the lower bound, if any, in the type of the
5515 index, then convert to sizetype and multiply by the size of
5516 the array element. */
5517 if (! integer_zerop (low_bound))
	      index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
				   index, low_bound);

	    offset = size_binop (PLUS_EXPR, offset,
				 size_binop (MULT_EXPR,
					     convert (sizetype, index),
					     unit_size));
5532 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5533 bitsize_int (*pbitsize));
5536 case VIEW_CONVERT_EXPR:
5537 if (keep_aligning && STRICT_ALIGNMENT
5538 && (TYPE_ALIGN (TREE_TYPE (exp))
5539 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5540 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5541 < BIGGEST_ALIGNMENT)
5542 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5543 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5551 /* If any reference in the chain is volatile, the effect is volatile. */
5552 if (TREE_THIS_VOLATILE (exp))
5555 exp = TREE_OPERAND (exp, 0);
5559 /* If OFFSET is constant, see if we can return the whole thing as a
5560 constant bit position. Otherwise, split it up. */
  if (host_integerp (offset, 0)
      && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
				 bitsize_unit_node))
      && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
      && host_integerp (tem, 0))
    *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
  else
    *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5574 /* Return a tree of sizetype representing the size, in bytes, of the element
5575 of EXP, an ARRAY_REF. */
5578 array_ref_element_size (tree exp)
5580 tree aligned_size = TREE_OPERAND (exp, 3);
5581 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5583 /* If a size was specified in the ARRAY_REF, it's the size measured
     in alignment units of the element type.  So multiply by that value.  */
  if (aligned_size)
    {
5587 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5588 sizetype from another type of the same width and signedness. */
5589 if (TREE_TYPE (aligned_size) != sizetype)
5590 aligned_size = fold_convert (sizetype, aligned_size);
5591 return size_binop (MULT_EXPR, aligned_size,
5592 size_int (TYPE_ALIGN_UNIT (elmt_type)));
5595 /* Otherwise, take the size from that of the element type. Substitute
5596 any PLACEHOLDER_EXPR that we have. */
5598 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
5601 /* Return a tree representing the lower bound of the array mentioned in
5602 EXP, an ARRAY_REF. */
5605 array_ref_low_bound (tree exp)
5607 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5609 /* If a lower bound is specified in EXP, use it. */
5610 if (TREE_OPERAND (exp, 2))
5611 return TREE_OPERAND (exp, 2);
5613 /* Otherwise, if there is a domain type and it has a lower bound, use it,
5614 substituting for a PLACEHOLDER_EXPR as needed. */
5615 if (domain_type && TYPE_MIN_VALUE (domain_type))
5616 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
5618 /* Otherwise, return a zero of the appropriate type. */
5619 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
5622 /* Return a tree representing the upper bound of the array mentioned in
5623 EXP, an ARRAY_REF. */
5626 array_ref_up_bound (tree exp)
5628 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5630 /* If there is a domain type and it has an upper bound, use it, substituting
5631 for a PLACEHOLDER_EXPR as needed. */
5632 if (domain_type && TYPE_MAX_VALUE (domain_type))
5633 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
  /* Otherwise fail.  */
  return NULL_TREE;
5639 /* Return a tree representing the offset, in bytes, of the field referenced
5640 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
5643 component_ref_field_offset (tree exp)
5645 tree aligned_offset = TREE_OPERAND (exp, 2);
5646 tree field = TREE_OPERAND (exp, 1);
5648 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
     in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT.  So multiply by that
     value.  */
  if (aligned_offset)
    {
5653 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5654 sizetype from another type of the same width and signedness. */
5655 if (TREE_TYPE (aligned_offset) != sizetype)
5656 aligned_offset = fold_convert (sizetype, aligned_offset);
5657 return size_binop (MULT_EXPR, aligned_offset,
5658 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
5661 /* Otherwise, take the offset from that of the field. Substitute
5662 any PLACEHOLDER_EXPR that we have. */
5664 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
5667 /* Return 1 if T is an expression that get_inner_reference handles. */
5670 handled_component_p (tree t)
5672 switch (TREE_CODE (t))
5677 case ARRAY_RANGE_REF:
    case VIEW_CONVERT_EXPR:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      return true;

    default:
      return false;
    }
}
5688 /* Given an rtx VALUE that may contain additions and multiplications, return
5689 an equivalent value that just refers to a register, memory, or constant.
5690 This is done by generating instructions to perform the arithmetic and
5691 returning a pseudo-register containing the value.
5693 The returned value may be a REG, SUBREG, MEM or constant. */
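/* Illustrative example: applied to the address-like expression

	(plus:SI (mult:SI (reg:SI 60) (const_int 4)) (reg:SI 61))

   force_operand emits a multiply and an add and returns the pseudo
   register holding the sum, leaving the caller with a plain REG.  */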
5696 force_operand (rtx value, rtx target)
5699 /* Use subtarget as the target for operand 0 of a binary operation. */
5700 rtx subtarget = get_subtarget (target);
5701 enum rtx_code code = GET_CODE (value);
5703 /* Check for subreg applied to an expression produced by loop optimizer. */
5705 && !REG_P (SUBREG_REG (value))
5706 && !MEM_P (SUBREG_REG (value)))
5708 value = simplify_gen_subreg (GET_MODE (value),
5709 force_reg (GET_MODE (SUBREG_REG (value)),
5710 force_operand (SUBREG_REG (value),
5712 GET_MODE (SUBREG_REG (value)),
5713 SUBREG_BYTE (value));
5714 code = GET_CODE (value);
5717 /* Check for a PIC address load. */
5718 if ((code == PLUS || code == MINUS)
5719 && XEXP (value, 0) == pic_offset_table_rtx
5720 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5721 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5722 || GET_CODE (XEXP (value, 1)) == CONST))
5725 subtarget = gen_reg_rtx (GET_MODE (value));
5726 emit_move_insn (subtarget, value);
5730 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5733 target = gen_reg_rtx (GET_MODE (value));
5734 convert_move (target, force_operand (XEXP (value, 0), NULL),
5735 code == ZERO_EXTEND);
5739 if (ARITHMETIC_P (value))
5741 op2 = XEXP (value, 1);
      if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
	subtarget = 0;
      if (code == MINUS && GET_CODE (op2) == CONST_INT)
	{
	  code = PLUS;
	  op2 = negate_rtx (GET_MODE (value), op2);
	}
5750 /* Check for an addition with OP2 a constant integer and our first
5751 operand a PLUS of a virtual register and something else. In that
5752 case, we want to emit the sum of the virtual register and the
5753 constant first and then add the other value. This allows virtual
5754 register instantiation to simply modify the constant rather than
5755 creating another one around this addition. */
5756 if (code == PLUS && GET_CODE (op2) == CONST_INT
5757 && GET_CODE (XEXP (value, 0)) == PLUS
5758 && REG_P (XEXP (XEXP (value, 0), 0))
5759 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5760 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5762 rtx temp = expand_simple_binop (GET_MODE (value), code,
5763 XEXP (XEXP (value, 0), 0), op2,
5764 subtarget, 0, OPTAB_LIB_WIDEN);
5765 return expand_simple_binop (GET_MODE (value), code, temp,
5766 force_operand (XEXP (XEXP (value,
5768 target, 0, OPTAB_LIB_WIDEN);
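/* Example of the special case above (illustrative): given

	(plus (plus (reg virtual-stack-vars) (reg 60)) (const_int 8))

   the sum "virtual-stack-vars + 8" is emitted first and register 60 is
   added afterwards, so that virtual register instantiation can simply
   fold the 8 into the frame-pointer offset instead of emitting a
   separate add.  */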
5771 op1 = force_operand (XEXP (value, 0), subtarget);
5772 op2 = force_operand (op2, NULL_RTX);
5776 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5778 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5779 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5780 target, 1, OPTAB_LIB_WIDEN);
5782 return expand_divmod (0,
5783 FLOAT_MODE_P (GET_MODE (value))
5784 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5785 GET_MODE (value), op1, op2, target, 0);
5788 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5792 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5796 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5800 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5801 target, 0, OPTAB_LIB_WIDEN);
5804 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5805 target, 1, OPTAB_LIB_WIDEN);
5808 if (UNARY_P (value))
5810 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5811 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5814 #ifdef INSN_SCHEDULING
/* On machines that have insn scheduling, we want all memory references to be
5816 explicit, so we need to deal with such paradoxical SUBREGs. */
5817 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
5818 && (GET_MODE_SIZE (GET_MODE (value))
5819 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5821 = simplify_gen_subreg (GET_MODE (value),
5822 force_reg (GET_MODE (SUBREG_REG (value)),
5823 force_operand (SUBREG_REG (value),
5825 GET_MODE (SUBREG_REG (value)),
5826 SUBREG_BYTE (value));
5832 /* Subroutine of expand_expr: return nonzero iff there is no way that
5833 EXP can reference X, which is being modified. TOP_P is nonzero if this
5834 call is going to be used to determine whether we need a temporary
5835 for EXP, as opposed to a recursive call to this function.
5837 It is always safe for this routine to return zero since it merely
5838 searches for optimization opportunities. */
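/* Illustrative use: expand_operands below calls
   safe_from_p (target, exp1, 1) before expanding the first operand
   into TARGET; if EXP1 might read TARGET (for instance, TARGET is the
   stack slot of a variable that EXP1 mentions), zero is returned and
   the caller abandons TARGET in favor of a fresh temporary.  */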
5841 safe_from_p (rtx x, tree exp, int top_p)
5847 /* If EXP has varying size, we MUST use a target since we currently
5848 have no way of allocating temporaries of variable size
5849 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5850 So we assume here that something at a higher level has prevented a
5851 clash. This is somewhat bogus, but the best we can do. Only
5852 do this when X is BLKmode and when we are at the top level. */
5853 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5854 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5855 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5856 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5857 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5859 && GET_MODE (x) == BLKmode)
      /* If X is in the outgoing argument area, it is always safe.  */
      || (MEM_P (x)
	  && (XEXP (x, 0) == virtual_outgoing_args_rtx
5863 || (GET_CODE (XEXP (x, 0)) == PLUS
5864 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5867 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5868 find the underlying pseudo. */
  if (GET_CODE (x) == SUBREG)
    {
      x = SUBREG_REG (x);
      if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
	return 0;
    }
5876 /* Now look at our tree code and possibly recurse. */
5877 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5879 case tcc_declaration:
5880 exp_rtl = DECL_RTL_IF_SET (exp);
5886 case tcc_exceptional:
5887 if (TREE_CODE (exp) == TREE_LIST)
5891 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
5893 exp = TREE_CHAIN (exp);
5896 if (TREE_CODE (exp) != TREE_LIST)
5897 return safe_from_p (x, exp, 0);
5900 else if (TREE_CODE (exp) == ERROR_MARK)
5901 return 1; /* An already-visited SAVE_EXPR? */
5906 /* The only case we look at here is the DECL_INITIAL inside a
5908 return (TREE_CODE (exp) != DECL_EXPR
5909 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
5910 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
5911 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
5914 case tcc_comparison:
5915 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
5920 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5922 case tcc_expression:
5924 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5925 the expression. If it is set, we conflict iff we are that rtx or
5926 both are in memory. Otherwise, we check all operands of the
5927 expression recursively. */
5929 switch (TREE_CODE (exp))
5932 /* If the operand is static or we are static, we can't conflict.
5933 Likewise if we don't conflict with the operand at all. */
5934 if (staticp (TREE_OPERAND (exp, 0))
5935 || TREE_STATIC (exp)
5936 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
	  /* Otherwise, the only way this can conflict is if we are taking
	     the address of a DECL and that address is part of X, which is
	     very rare.  */
5942 exp = TREE_OPERAND (exp, 0);
5945 if (!DECL_RTL_SET_P (exp)
5946 || !MEM_P (DECL_RTL (exp)))
5949 exp_rtl = XEXP (DECL_RTL (exp), 0);
5953 case MISALIGNED_INDIRECT_REF:
5954 case ALIGN_INDIRECT_REF:
5957 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5958 get_alias_set (exp)))
5963 /* Assume that the call will clobber all hard registers and
5965 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
5970 case WITH_CLEANUP_EXPR:
5971 case CLEANUP_POINT_EXPR:
5972 /* Lowered by gimplify.c. */
5976 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5982 /* If we have an rtx, we do not need to scan our operands. */
5986 nops = TREE_CODE_LENGTH (TREE_CODE (exp));
5987 for (i = 0; i < nops; i++)
5988 if (TREE_OPERAND (exp, i) != 0
5989 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5992 /* If this is a language-specific tree code, it may require
5993 special handling. */
5994 if ((unsigned int) TREE_CODE (exp)
5995 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5996 && !lang_hooks.safe_from_p (x, exp))
6001 /* Should never get a type here. */
6005 /* If we have an rtl, find any enclosed object. Then see if we conflict
6009 if (GET_CODE (exp_rtl) == SUBREG)
6011 exp_rtl = SUBREG_REG (exp_rtl);
6013 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6017 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6018 are memory and they conflict. */
6019 return ! (rtx_equal_p (x, exp_rtl)
6020 || (MEM_P (x) && MEM_P (exp_rtl)
6021 && true_dependence (exp_rtl, VOIDmode, x,
6022 rtx_addr_varies_p)));
6025 /* If we reach here, it is safe. */
6030 /* Return the highest power of two that EXP is known to be a multiple of.
6031 This is used in updating alignment of MEMs in array references. */
6033 static unsigned HOST_WIDE_INT
6034 highest_pow2_factor (tree exp)
6036 unsigned HOST_WIDE_INT c0, c1;
6038 switch (TREE_CODE (exp))
6041 /* We can find the lowest bit that's a one. If the low
6042 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6043 We need to handle this case since we can find it in a COND_EXPR,
6044 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
	 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
	 later ICE.  */
6047 if (TREE_CONSTANT_OVERFLOW (exp))
6048 return BIGGEST_ALIGNMENT;
6051 /* Note: tree_low_cst is intentionally not used here,
6052 we don't care about the upper bits. */
6053 c0 = TREE_INT_CST_LOW (exp);
6055 return c0 ? c0 : BIGGEST_ALIGNMENT;
6059 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6060 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6061 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6062 return MIN (c0, c1);
6065 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      return c0 * c1;
6069 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6071 if (integer_pow2p (TREE_OPERAND (exp, 1))
6072 && host_integerp (TREE_OPERAND (exp, 1), 1))
6074 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6075 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6076 return MAX (1, c0 / c1);
6080 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6082 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6085 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6088 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6089 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6090 return MIN (c0, c1);
6099 /* Similar, except that the alignment requirements of TARGET are
6100 taken into account. Assume it is at least as aligned as its
6101 type, unless it is a COMPONENT_REF in which case the layout of
6102 the structure gives the alignment. */
6104 static unsigned HOST_WIDE_INT
6105 highest_pow2_factor_for_target (tree target, tree exp)
6107 unsigned HOST_WIDE_INT target_align, factor;
6109 factor = highest_pow2_factor (exp);
6110 if (TREE_CODE (target) == COMPONENT_REF)
6111 target_align = DECL_ALIGN_UNIT (TREE_OPERAND (target, 1));
6113 target_align = TYPE_ALIGN_UNIT (TREE_TYPE (target));
6114 return MAX (factor, target_align);
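/* Worked example (illustrative): for the offset expression
   "i * 12 + 8", highest_pow2_factor returns 4: nothing is known about
   I (factor 1), 12 contributes a factor of 4, and MIN (1 * 4, 8) is 4.
   If the target is a COMPONENT_REF whose field is known to be 8-byte
   aligned, highest_pow2_factor_for_target returns MAX (4, 8) == 8.  */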
6117 /* Expands variable VAR. */
6120 expand_var (tree var)
6122 if (DECL_EXTERNAL (var))
6125 if (TREE_STATIC (var))
6126 /* If this is an inlined copy of a static local variable,
6127 look up the original decl. */
6128 var = DECL_ORIGIN (var);
6130 if (TREE_STATIC (var)
6131 ? !TREE_ASM_WRITTEN (var)
6132 : !DECL_RTL_SET_P (var))
6134 if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
6135 /* Should be ignored. */;
6136 else if (lang_hooks.expand_decl (var))
6138 else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
6140 else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
6141 rest_of_decl_compilation (var, 0, 0);
6143 /* No expansion needed. */
6144 gcc_assert (TREE_CODE (var) == TYPE_DECL
6145 || TREE_CODE (var) == CONST_DECL
6146 || TREE_CODE (var) == FUNCTION_DECL
6147 || TREE_CODE (var) == LABEL_DECL);
6151 /* Subroutine of expand_expr. Expand the two operands of a binary
6152 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6153 The value may be stored in TARGET if TARGET is nonzero. The
6154 MODIFIER argument is as documented by expand_expr. */
6157 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6158 enum expand_modifier modifier)
6160 if (! safe_from_p (target, exp1, 1))
6162 if (operand_equal_p (exp0, exp1, 0))
6164 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6165 *op1 = copy_rtx (*op0);
6169 /* If we need to preserve evaluation order, copy exp0 into its own
6170 temporary variable so that it can't be clobbered by exp1. */
6171 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6172 exp0 = save_expr (exp0);
6173 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6174 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
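/* Example (illustrative): for "a + a" the operand trees compare
   equal, so the value is expanded once and duplicated with copy_rtx.
   When the language requires left-to-right evaluation
   (flag_evaluation_order) and EXP1 has side effects, EXP0 is wrapped
   in a SAVE_EXPR first so that expanding EXP1 cannot clobber it.  */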
6179 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
6180 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6183 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
6184 enum expand_modifier modifier)
6186 rtx result, subtarget;
6188 HOST_WIDE_INT bitsize, bitpos;
6189 int volatilep, unsignedp;
6190 enum machine_mode mode1;
6192 /* If we are taking the address of a constant and are at the top level,
6193 we have to use output_constant_def since we can't call force_const_mem
6195 /* ??? This should be considered a front-end bug. We should not be
6196 generating ADDR_EXPR of something that isn't an LVALUE. The only
6197 exception here is STRING_CST. */
6198 if (TREE_CODE (exp) == CONSTRUCTOR
6199 || CONSTANT_CLASS_P (exp))
6200 return XEXP (output_constant_def (exp, 0), 0);
6202 /* Everything must be something allowed by is_gimple_addressable. */
6203 switch (TREE_CODE (exp))
6206 /* This case will happen via recursion for &a->b. */
6207 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, EXPAND_NORMAL);
6210 /* Recurse and make the output_constant_def clause above handle this. */
6211 return expand_expr_addr_expr_1 (DECL_INITIAL (exp), target,
6215 /* The real part of the complex number is always first, therefore
6216 the address is the same as the address of the parent object. */
6219 inner = TREE_OPERAND (exp, 0);
6223 /* The imaginary part of the complex number is always second.
6224 The expression is therefore always offset by the size of the
6227 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
6228 inner = TREE_OPERAND (exp, 0);
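/* Editor's note -- illustrative sketch, not part of the original source.
   For `__imag__ z' with `_Complex double z' on a target where DFmode is
   64 bits wide, TYPE_MODE (TREE_TYPE (exp)) is DFmode, so bitpos becomes
   64: the imaginary part sits one real-part-width past &z.  */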
6232 /* If the object is a DECL, then expand it for its rtl. Don't bypass
6233 expand_expr, as that can have various side effects; LABEL_DECLs, for
6234 example, may not have their DECL_RTL set yet. Assume language
6235 specific tree nodes can be expanded in some interesting way. */
6237 || TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE)
6239 result = expand_expr (exp, target, tmode,
6240 modifier == EXPAND_INITIALIZER
6241 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
6243 /* If the DECL isn't in memory, then the DECL wasn't properly
6244 marked TREE_ADDRESSABLE, which will be either a front-end
6245 or a tree optimizer bug. */
6246 gcc_assert (MEM_P (result));
6247 result = XEXP (result, 0);
6249 /* ??? Is this needed anymore? */
6250 if (DECL_P (exp) && ! TREE_USED (exp))
6252 assemble_external (exp);
6253 TREE_USED (exp) = 1;
6256 if (modifier != EXPAND_INITIALIZER
6257 && modifier != EXPAND_CONST_ADDRESS)
6258 result = force_operand (result, target);
6262 /* Pass FALSE as the last argument to get_inner_reference although
6263 we are expanding to RTL. The rationale is that we know how to
6264 handle "aligning nodes" here: we can just bypass them because
6265 they won't change the final object whose address will be returned
6266 (they actually exist only for that purpose). */
6267 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6268 &mode1, &unsignedp, &volatilep, false);
6272 /* We must have made progress. */
6273 gcc_assert (inner != exp);
6275 subtarget = offset || bitpos ? NULL_RTX : target;
6276 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier);
6282 if (modifier != EXPAND_NORMAL)
6283 result = force_operand (result, NULL);
6284 tmp = expand_expr (offset, NULL, tmode, EXPAND_NORMAL);
6286 result = convert_memory_address (tmode, result);
6287 tmp = convert_memory_address (tmode, tmp);
6289 if (modifier == EXPAND_SUM)
6290 result = gen_rtx_PLUS (tmode, result, tmp);
6293 subtarget = bitpos ? NULL_RTX : target;
6294 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
6295 1, OPTAB_LIB_WIDEN);
6301 /* Someone beforehand should have rejected taking the address
6302 of such an object. */
6303 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
6305 result = plus_constant (result, bitpos / BITS_PER_UNIT);
6306 if (modifier < EXPAND_SUM)
6307 result = force_operand (result, target);
6313 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
6314 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6317 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
6318 enum expand_modifier modifier)
6320 enum machine_mode rmode;
6323 /* Target mode of VOIDmode says "whatever's natural". */
6324 if (tmode == VOIDmode)
6325 tmode = TYPE_MODE (TREE_TYPE (exp));
6327 /* We can get called with some Weird Things if the user does silliness
6328 like "(short) &a". In that case, convert_memory_address won't do
6329 the right thing, so ignore the given target mode. */
6330 if (tmode != Pmode && tmode != ptr_mode)
6333 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
6336 /* Despite expand_expr's claims about ignoring TMODE when not
6337 strictly convenient, things break if we don't honor it.  Note
6338 that, combined with the above, we only do this for pointer modes.  */
6339 rmode = GET_MODE (result);
6340 if (rmode == VOIDmode)
6343 result = convert_memory_address (tmode, result);
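/* Editor's note -- illustrative sketch, not part of the original source.
   E.g. for `(short) &a' the caller may pass tmode == HImode; since that
   is neither Pmode nor ptr_mode, the code above falls back to the
   pointer mode, computes the address there, and only then lets the
   normal conversion machinery narrow the result.  */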
6349 /* expand_expr: generate code for computing expression EXP.
6350 An rtx for the computed value is returned. The value is never null.
6351 In the case of a void EXP, const0_rtx is returned.
6353 The value may be stored in TARGET if TARGET is nonzero.
6354 TARGET is just a suggestion; callers must assume that
6355 the rtx returned may not be the same as TARGET.
6357 If TARGET is CONST0_RTX, it means that the value will be ignored.
6359 If TMODE is not VOIDmode, it suggests generating the
6360 result in mode TMODE. But this is done only when convenient.
6361 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6362 TMODE is just a suggestion; callers must assume that
6363 the rtx returned may not have mode TMODE.
6365 Note that TARGET may have neither TMODE nor MODE. In that case, it
6366 probably will not be used.
6368 If MODIFIER is EXPAND_SUM then when EXP is an addition
6369 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6370 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6371 products as above, or REG or MEM, or constant.
6372 Ordinarily in such cases we would output mul or add instructions
6373 and then return a pseudo reg containing the sum.
6375 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6376 it also marks a label as absolutely required (it can't be dead).
6377 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6378 This is used for outputting expressions used in initializers.
6380 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6381 with a constant address even if that address is not normally legitimate.
6382 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6384 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6385 a call parameter. Such targets require special care as we haven't yet
6386 marked TARGET so that it's safe from being trashed by libcalls. We
6387 don't want to use TARGET for anything but the final result;
6388 intermediate values must go elsewhere.  Additionally, calls to
6389 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6391 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6392 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6393 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
6394 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
6395 recursively.  */
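/* Editor's note -- illustrative sketch, not part of the original source;
   identifier names are hypothetical.  A typical caller treats TARGET and
   TMODE purely as hints:

     rtx op = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);

   whereas an address computation may ask for the lazy form

     rtx sum = expand_expr (exp, NULL_RTX, ptr_mode, EXPAND_SUM);

   and then receive e.g. (plus (reg) (mult (reg) (const_int 4))) to fold
   into an addressing mode instead of ready-made add/mul insns.  */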
6397 static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
6398 enum expand_modifier, rtx *);
6401 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6402 enum expand_modifier modifier, rtx *alt_rtl)
6405 rtx ret, last = NULL;
6407 /* Handle ERROR_MARK before anybody tries to access its type. */
6408 if (TREE_CODE (exp) == ERROR_MARK
6409 || TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK)
6411 ret = CONST0_RTX (tmode);
6412 return ret ? ret : const0_rtx;
6415 if (flag_non_call_exceptions)
6417 rn = lookup_stmt_eh_region (exp);
6418 /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw. */
6420 last = get_last_insn ();
6423 /* If this is an expression of some kind and it has an associated line
6424 number, then emit the line number before expanding the expression.
6426 We need to save and restore the file and line information so that
6427 errors discovered during expansion are emitted with the right
6428 information.  It would be better if the diagnostic routines
6429 used the file/line information embedded in the tree nodes rather
6430 than globals.  */
6431 if (cfun && EXPR_HAS_LOCATION (exp))
6433 location_t saved_location = input_location;
6434 input_location = EXPR_LOCATION (exp);
6435 emit_line_note (input_location);
6437 /* Record where the insns produced belong. */
6438 record_block_change (TREE_BLOCK (exp));
6440 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6442 input_location = saved_location;
6446 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6449 /* If using non-call exceptions, mark all insns that may trap.
6450 expand_call() will mark CALL_INSNs before we get to this code,
6451 but it doesn't handle libcalls, and these may trap. */
6455 for (insn = next_real_insn (last); insn;
6456 insn = next_real_insn (insn))
6458 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
6459 /* If we want exceptions for non-call insns, any
6460 may_trap_p instruction may throw. */
6461 && GET_CODE (PATTERN (insn)) != CLOBBER
6462 && GET_CODE (PATTERN (insn)) != USE
6463 && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
6465 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
6475 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
6476 enum expand_modifier modifier, rtx *alt_rtl)
6479 tree type = TREE_TYPE (exp);
6481 enum machine_mode mode;
6482 enum tree_code code = TREE_CODE (exp);
6484 rtx subtarget, original_target;
6487 bool reduce_bit_field = false;
6488 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field && !ignore \
6489 ? reduce_to_bit_field_precision ((expr), \
6490 target, \
6491 type) \
6492 : (expr))
6494 mode = TYPE_MODE (type);
6495 unsignedp = TYPE_UNSIGNED (type);
6496 if (lang_hooks.reduce_bit_field_operations
6497 && TREE_CODE (type) == INTEGER_TYPE
6498 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type))
6500 /* An operation in what may be a bit-field type needs the
6501 result to be reduced to the precision of the bit-field type,
6502 which is narrower than that of the type's mode. */
6503 reduce_bit_field = true;
6504 if (modifier == EXPAND_STACK_PARM)
6508 /* Use subtarget as the target for operand 0 of a binary operation. */
6509 subtarget = get_subtarget (target);
6510 original_target = target;
6511 ignore = (target == const0_rtx
6512 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6513 || code == CONVERT_EXPR || code == COND_EXPR
6514 || code == VIEW_CONVERT_EXPR)
6515 && TREE_CODE (type) == VOID_TYPE));
6517 /* If we are going to ignore this result, we need only do something
6518 if there is a side-effect somewhere in the expression. If there
6519 is, short-circuit the most common cases here. Note that we must
6520 not call expand_expr with anything but const0_rtx in case this
6521 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
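/* Editor's note -- illustrative sketch, not part of the original source.
   For a statement like `(void) (x + f ());' the result is ignored, so
   the binary-operation case below simply expands both operands with a
   const0_rtx target; only the call to f () survives as emitted RTL.  */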
6525 if (! TREE_SIDE_EFFECTS (exp))
6528 /* Ensure we reference a volatile object even if value is ignored, but
6529 don't do this if all we are doing is taking its address. */
6530 if (TREE_THIS_VOLATILE (exp)
6531 && TREE_CODE (exp) != FUNCTION_DECL
6532 && mode != VOIDmode && mode != BLKmode
6533 && modifier != EXPAND_CONST_ADDRESS)
6535 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6537 temp = copy_to_reg (temp);
6541 if (TREE_CODE_CLASS (code) == tcc_unary
6542 || code == COMPONENT_REF || code == INDIRECT_REF)
6543 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6546 else if (TREE_CODE_CLASS (code) == tcc_binary
6547 || TREE_CODE_CLASS (code) == tcc_comparison
6548 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6550 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6551 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6554 else if (code == BIT_FIELD_REF)
6556 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6557 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6558 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6565 /* If we will do cse, generate all results into pseudo registers
6566 since 1) that allows cse to find more things
6567 and 2) otherwise cse could produce an insn the machine
6568 cannot support. An exception is a CONSTRUCTOR into a multi-word
6569 MEM: that's much more likely to be most efficient into the MEM.
6570 Another is a CALL_EXPR which must return in memory. */
6572 if (! cse_not_expected && mode != BLKmode && target
6573 && (!REG_P (target) || REGNO (target) < FIRST_PSEUDO_REGISTER)
6574 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
6575 && ! (code == CALL_EXPR && aggregate_value_p (exp, exp)))
6582 tree function = decl_function_context (exp);
6584 temp = label_rtx (exp);
6585 temp = gen_rtx_LABEL_REF (Pmode, temp);
6587 if (function != current_function_decl
6589 LABEL_REF_NONLOCAL_P (temp) = 1;
6591 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
6596 return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
6601 /* If a static var's type was incomplete when the decl was written,
6602 but the type is complete now, lay out the decl now. */
6603 if (DECL_SIZE (exp) == 0
6604 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6605 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6606 layout_decl (exp, 0);
6608 /* ... fall through ... */
6612 gcc_assert (DECL_RTL (exp));
6614 /* Ensure the variable is marked as used even if it doesn't go through
6615 a parser.  If it hasn't been used yet, write out an external
6616 definition.  */
6617 if (! TREE_USED (exp))
6619 assemble_external (exp);
6620 TREE_USED (exp) = 1;
6623 /* Show we haven't gotten RTL for this yet. */
6626 /* Variables inherited from containing functions should have
6627 been lowered by this point. */
6628 context = decl_function_context (exp);
6629 gcc_assert (!context
6630 || context == current_function_decl
6631 || TREE_STATIC (exp)
6632 /* ??? C++ creates functions that are not TREE_STATIC. */
6633 || TREE_CODE (exp) == FUNCTION_DECL);
6635 /* This is the case of an array whose size is to be determined
6636 from its initializer, while the initializer is still being parsed.
6639 if (MEM_P (DECL_RTL (exp))
6640 && REG_P (XEXP (DECL_RTL (exp), 0)))
6641 temp = validize_mem (DECL_RTL (exp));
6643 /* If DECL_RTL is memory, we are in the normal case: either
6644 the address is not valid, or it is not a register and -fforce-addr
6645 is specified.  In either case, get the address into a register.  */
6647 else if (MEM_P (DECL_RTL (exp))
6648 && modifier != EXPAND_CONST_ADDRESS
6649 && modifier != EXPAND_SUM
6650 && modifier != EXPAND_INITIALIZER
6651 && (! memory_address_p (DECL_MODE (exp),
6652 XEXP (DECL_RTL (exp), 0))
6654 && !REG_P (XEXP (DECL_RTL (exp), 0)))))
6657 *alt_rtl = DECL_RTL (exp);
6658 temp = replace_equiv_address (DECL_RTL (exp),
6659 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6662 /* If we got something, return it. But first, set the alignment
6663 if the address is a register. */
6666 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
6667 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6672 /* If the mode of DECL_RTL does not match that of the decl, it
6673 must be a promoted value. We return a SUBREG of the wanted mode,
6674 but mark it so that we know that it was already extended. */
6676 if (REG_P (DECL_RTL (exp))
6677 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6679 enum machine_mode pmode;
6681 /* Get the signedness used for this variable. Ensure we get the
6682 same mode we got when the variable was declared. */
6683 pmode = promote_mode (type, DECL_MODE (exp), &unsignedp,
6684 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0));
6685 gcc_assert (GET_MODE (DECL_RTL (exp)) == pmode);
6687 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6688 SUBREG_PROMOTED_VAR_P (temp) = 1;
6689 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6693 return DECL_RTL (exp);
6696 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6697 TREE_INT_CST_HIGH (exp), mode);
6699 /* ??? If overflow is set, fold will have done an incomplete job,
6700 which can result in (plus xx (const_int 0)), which can get
6701 simplified by validate_replace_rtx during virtual register
6702 instantiation, which can result in unrecognizable insns.
6703 Avoid this by forcing all overflows into registers. */
6704 if (TREE_CONSTANT_OVERFLOW (exp)
6705 && modifier != EXPAND_INITIALIZER)
6706 temp = force_reg (mode, temp);
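/* Editor's note -- illustrative sketch, not part of the original source.
   E.g. an overflowed `INT_MAX + 1' constant left bare could later be
   folded into a (plus (reg) (const_int 0)) that validate_replace_rtx
   simplifies into a shape no insn pattern recognizes; keeping the
   overflowed constant in a register sidesteps that.  */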
6711 if (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_INT
6712 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_FLOAT)
6713 return const_vector_from_tree (exp);
6715 return expand_expr (build1 (CONSTRUCTOR, TREE_TYPE (exp),
6716 TREE_VECTOR_CST_ELTS (exp)),
6717 ignore ? const0_rtx : target, tmode, modifier);
6720 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6723 /* If optimized, generate immediate CONST_DOUBLE
6724 which will be turned into memory by reload if necessary.
6726 We used to force a register so that loop.c could see it. But
6727 this does not allow gen_* patterns to perform optimizations with
6728 the constants. It also produces two insns in cases like "x = 1.0;".
6729 On most machines, floating-point constants are not permitted in
6730 many insns, so we'd end up copying it to a register in any case.
6732 Now, we do the copying in expand_binop, if appropriate. */
6733 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6734 TYPE_MODE (TREE_TYPE (exp)));
6737 /* Handle evaluating a complex constant in a CONCAT target. */
6738 if (original_target && GET_CODE (original_target) == CONCAT)
6740 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6743 rtarg = XEXP (original_target, 0);
6744 itarg = XEXP (original_target, 1);
6746 /* Move the real and imaginary parts separately. */
6747 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
6748 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
6751 emit_move_insn (rtarg, op0);
6753 emit_move_insn (itarg, op1);
6755 return original_target;
6758 /* ... fall through ... */
6761 temp = output_constant_def (exp, 1);
6763 /* temp contains a constant address.
6764 On RISC machines where a constant address isn't valid,
6765 make some insns to get that address into a register. */
6766 if (modifier != EXPAND_CONST_ADDRESS
6767 && modifier != EXPAND_INITIALIZER
6768 && modifier != EXPAND_SUM
6769 && (! memory_address_p (mode, XEXP (temp, 0))
6770 || flag_force_addr))
6771 return replace_equiv_address (temp,
6772 copy_rtx (XEXP (temp, 0)));
6777 tree val = TREE_OPERAND (exp, 0);
6778 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
6780 if (!SAVE_EXPR_RESOLVED_P (exp))
6782 /* We can indeed still hit this case, typically via builtin
6783 expanders calling save_expr immediately before expanding
6784 something. Assume this means that we only have to deal
6785 with non-BLKmode values. */
6786 gcc_assert (GET_MODE (ret) != BLKmode);
6788 val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
6789 DECL_ARTIFICIAL (val) = 1;
6790 DECL_IGNORED_P (val) = 1;
6791 TREE_OPERAND (exp, 0) = val;
6792 SAVE_EXPR_RESOLVED_P (exp) = 1;
6794 if (!CONSTANT_P (ret))
6795 ret = copy_to_reg (ret);
6796 SET_DECL_RTL (val, ret);
6803 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6804 expand_goto (TREE_OPERAND (exp, 0));
6806 expand_computed_goto (TREE_OPERAND (exp, 0));
6810 /* If we don't need the result, just ensure we evaluate any
6811 subexpressions.  */
6816 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6817 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6822 /* All elts simple constants => refer to a constant in memory. But
6823 if this is a non-BLKmode mode, let it store a field at a time
6824 since that should make a CONST_INT or CONST_DOUBLE when we
6825 fold. Likewise, if we have a target we can use, it is best to
6826 store directly into the target unless the type is large enough
6827 that memcpy will be used. If we are making an initializer and
6828 all operands are constant, put it in memory as well.
6830 FIXME: Avoid trying to fill vector constructors piece-meal.
6831 Output them with output_constant_def below unless we're sure
6832 they're zeros. This should go away when vector initializers
6833 are treated like VECTOR_CST instead of arrays.
6835 else if ((TREE_STATIC (exp)
6836 && ((mode == BLKmode
6837 && ! (target != 0 && safe_from_p (target, exp, 1)))
6838 || TREE_ADDRESSABLE (exp)
6839 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6840 && (! MOVE_BY_PIECES_P
6841 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6843 && ! mostly_zeros_p (exp))))
6844 || ((modifier == EXPAND_INITIALIZER
6845 || modifier == EXPAND_CONST_ADDRESS)
6846 && TREE_CONSTANT (exp)))
6848 rtx constructor = output_constant_def (exp, 1);
6850 if (modifier != EXPAND_CONST_ADDRESS
6851 && modifier != EXPAND_INITIALIZER
6852 && modifier != EXPAND_SUM)
6853 constructor = validize_mem (constructor);
6859 /* Handle calls that pass values in multiple non-contiguous
6860 locations. The Irix 6 ABI has examples of this. */
6861 if (target == 0 || ! safe_from_p (target, exp, 1)
6862 || GET_CODE (target) == PARALLEL
6863 || modifier == EXPAND_STACK_PARM)
6865 = assign_temp (build_qualified_type (type,
6867 | (TREE_READONLY (exp)
6868 * TYPE_QUAL_CONST))),
6869 0, TREE_ADDRESSABLE (exp), 1);
6871 store_constructor (exp, target, 0, int_expr_size (exp));
6875 case MISALIGNED_INDIRECT_REF:
6876 case ALIGN_INDIRECT_REF:
6879 tree exp1 = TREE_OPERAND (exp, 0);
6881 if (modifier != EXPAND_WRITE)
6885 t = fold_read_from_constant_string (exp);
6887 return expand_expr (t, target, tmode, modifier);
6890 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6891 op0 = memory_address (mode, op0);
6893 if (code == ALIGN_INDIRECT_REF)
6895 int align = TYPE_ALIGN_UNIT (type);
6896 op0 = gen_rtx_AND (Pmode, op0, GEN_INT (-align));
6897 op0 = memory_address (mode, op0);
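/* Editor's note -- illustrative sketch, not part of the original source.
   With TYPE_ALIGN_UNIT == 16, GEN_INT (-align) produces the mask
   ...fff0, so the AND rounds the pointer down to a 16-byte boundary
   before the MEM is formed.  */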
6900 temp = gen_rtx_MEM (mode, op0);
6902 set_mem_attributes (temp, exp, 0);
6904 /* Resolve the misalignment now, so that we don't have to remember
6905 to resolve it later. Of course, this only works for reads. */
6906 /* ??? When we get around to supporting writes, we'll have to handle
6907 this in store_expr directly. The vectorizer isn't generating
6908 those yet, however. */
6909 if (code == MISALIGNED_INDIRECT_REF)
6914 gcc_assert (modifier == EXPAND_NORMAL);
6916 /* The vectorizer should have already checked the mode. */
6917 icode = movmisalign_optab->handlers[mode].insn_code;
6918 gcc_assert (icode != CODE_FOR_nothing);
6920 /* We've already validated the memory, and we're creating a
6921 new pseudo destination. The predicates really can't fail. */
6922 reg = gen_reg_rtx (mode);
6924 /* Nor can the insn generator. */
6925 insn = GEN_FCN (icode) (reg, temp);
6934 case TARGET_MEM_REF:
6936 struct mem_address addr;
6938 get_address_description (exp, &addr);
6939 op0 = addr_for_mem_ref (&addr, true);
6940 op0 = memory_address (mode, op0);
6941 temp = gen_rtx_MEM (mode, op0);
6942 set_mem_attributes (temp, TMR_ORIGINAL (exp), 0);
6949 tree array = TREE_OPERAND (exp, 0);
6950 tree index = TREE_OPERAND (exp, 1);
6952 /* Fold an expression like: "foo"[2].
6953 This is not done in fold so it won't happen inside &.
6954 Don't fold if this is for wide characters since it's too
6955 difficult to do correctly and this is a very rare case. */
6957 if (modifier != EXPAND_CONST_ADDRESS
6958 && modifier != EXPAND_INITIALIZER
6959 && modifier != EXPAND_MEMORY)
6961 tree t = fold_read_from_constant_string (exp);
6964 return expand_expr (t, target, tmode, modifier);
6967 /* If this is a constant index into a constant array,
6968 just get the value from the array. Handle both the cases when
6969 we have an explicit constructor and when our operand is a variable
6970 that was declared const. */
6972 if (modifier != EXPAND_CONST_ADDRESS
6973 && modifier != EXPAND_INITIALIZER
6974 && modifier != EXPAND_MEMORY
6975 && TREE_CODE (array) == CONSTRUCTOR
6976 && ! TREE_SIDE_EFFECTS (array)
6977 && TREE_CODE (index) == INTEGER_CST)
6981 for (elem = CONSTRUCTOR_ELTS (array);
6982 (elem && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6983 elem = TREE_CHAIN (elem))
6986 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6987 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
6991 else if (optimize >= 1
6992 && modifier != EXPAND_CONST_ADDRESS
6993 && modifier != EXPAND_INITIALIZER
6994 && modifier != EXPAND_MEMORY
6995 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6996 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6997 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
6998 && targetm.binds_local_p (array))
7000 if (TREE_CODE (index) == INTEGER_CST)
7002 tree init = DECL_INITIAL (array);
7004 if (TREE_CODE (init) == CONSTRUCTOR)
7008 for (elem = CONSTRUCTOR_ELTS (init);
7010 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
7011 elem = TREE_CHAIN (elem))
7014 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
7015 return expand_expr (fold (TREE_VALUE (elem)), target,
7018 else if (TREE_CODE (init) == STRING_CST
7019 && 0 > compare_tree_int (index,
7020 TREE_STRING_LENGTH (init)))
7022 tree type = TREE_TYPE (TREE_TYPE (init));
7023 enum machine_mode mode = TYPE_MODE (type);
7025 if (GET_MODE_CLASS (mode) == MODE_INT
7026 && GET_MODE_SIZE (mode) == 1)
7027 return gen_int_mode (TREE_STRING_POINTER (init)
7028 [TREE_INT_CST_LOW (index)], mode);
7033 goto normal_inner_ref;
7036 /* If the operand is a CONSTRUCTOR, we can just extract the
7037 appropriate field if it is present. */
7038 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
7042 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
7043 elt = TREE_CHAIN (elt))
7044 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
7045 /* We can normally use the value of the field in the
7046 CONSTRUCTOR. However, if this is a bitfield in
7047 an integral mode that we can fit in a HOST_WIDE_INT,
7048 we must mask only the number of bits in the bitfield,
7049 since this is done implicitly by the constructor. If
7050 the bitfield does not meet either of those conditions,
7051 we can't do this optimization. */
7052 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
7053 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
7055 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
7056 <= HOST_BITS_PER_WIDE_INT))))
7058 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
7059 && modifier == EXPAND_STACK_PARM)
7061 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
7062 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
7064 HOST_WIDE_INT bitsize
7065 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
7066 enum machine_mode imode
7067 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
7069 if (TYPE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
7071 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7072 op0 = expand_and (imode, op0, op1, target);
7077 = build_int_cst (NULL_TREE,
7078 GET_MODE_BITSIZE (imode) - bitsize);
7080 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7082 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7090 goto normal_inner_ref;
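/* Editor's note -- illustrative sketch, not part of the original source.
   Worked example of the bit-field handling above, for a 3-bit field:
   the unsigned path masks with (1 << 3) - 1 == 7, while the signed
   path shifts left then right by GET_MODE_BITSIZE (imode) - 3 so the
   field's top bit is replicated as a sign bit.  */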
7093 case ARRAY_RANGE_REF:
7096 enum machine_mode mode1;
7097 HOST_WIDE_INT bitsize, bitpos;
7100 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7101 &mode1, &unsignedp, &volatilep, true);
7104 /* If we got back the original object, something is wrong. Perhaps
7105 we are evaluating an expression too early. In any event, don't
7106 infinitely recurse. */
7107 gcc_assert (tem != exp);
7109 /* If TEM's type is a union of variable size, pass TARGET to the inner
7110 computation, since it will need a temporary and TARGET is known
7111 to suffice.  This occurs in unchecked conversion in Ada.  */
7115 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7116 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7118 && modifier != EXPAND_STACK_PARM
7119 ? target : NULL_RTX),
7121 (modifier == EXPAND_INITIALIZER
7122 || modifier == EXPAND_CONST_ADDRESS
7123 || modifier == EXPAND_STACK_PARM)
7124 ? modifier : EXPAND_NORMAL);
7126 /* If this is a constant, put it into a register if it is a
7127 legitimate constant and OFFSET is 0, and into memory if it isn't.  */
7128 if (CONSTANT_P (op0))
7130 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7131 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7133 op0 = force_reg (mode, op0);
7135 op0 = validize_mem (force_const_mem (mode, op0));
7138 /* Otherwise, if this object is not in memory and we either have an
7139 offset or a BLKmode result, put it there. This case can't occur in
7140 C, but can in Ada if we have unchecked conversion of an expression
7141 from a scalar type to an array or record type or for an
7142 ARRAY_RANGE_REF whose type is BLKmode. */
7143 else if (!MEM_P (op0)
7145 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7147 tree nt = build_qualified_type (TREE_TYPE (tem),
7148 (TYPE_QUALS (TREE_TYPE (tem))
7149 | TYPE_QUAL_CONST));
7150 rtx memloc = assign_temp (nt, 1, 1, 1);
7152 emit_move_insn (memloc, op0);
7158 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7161 gcc_assert (MEM_P (op0));
7163 #ifdef POINTERS_EXTEND_UNSIGNED
7164 if (GET_MODE (offset_rtx) != Pmode)
7165 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7167 if (GET_MODE (offset_rtx) != ptr_mode)
7168 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7171 if (GET_MODE (op0) == BLKmode
7172 /* A constant address in OP0 can have VOIDmode; we must
7173 not try to call force_reg in that case. */
7174 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7176 && (bitpos % bitsize) == 0
7177 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7178 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7180 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7184 op0 = offset_address (op0, offset_rtx,
7185 highest_pow2_factor (offset));
7188 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7189 record its alignment as BIGGEST_ALIGNMENT. */
7190 if (MEM_P (op0) && bitpos == 0 && offset != 0
7191 && is_aligning_offset (offset, tem))
7192 set_mem_align (op0, BIGGEST_ALIGNMENT);
7194 /* Don't forget about volatility even if this is a bitfield. */
7195 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
7197 if (op0 == orig_op0)
7198 op0 = copy_rtx (op0);
7200 MEM_VOLATILE_P (op0) = 1;
7203 /* The following code doesn't handle CONCAT.
7204 Assume only bitpos == 0 can be used for CONCAT, due to
7205 one-element arrays having the same mode as their element.  */
7206 if (GET_CODE (op0) == CONCAT)
7208 gcc_assert (bitpos == 0
7209 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)));
7213 /* In cases where an aligned union has an unaligned object
7214 as a field, we might be extracting a BLKmode value from
7215 an integer-mode (e.g., SImode) object. Handle this case
7216 by doing the extract into an object as wide as the field
7217 (which we know to be the width of a basic mode), then
7218 storing into memory, and changing the mode to BLKmode. */
7219 if (mode1 == VOIDmode
7220 || REG_P (op0) || GET_CODE (op0) == SUBREG
7221 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7222 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7223 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7224 && modifier != EXPAND_CONST_ADDRESS
7225 && modifier != EXPAND_INITIALIZER)
7226 /* If the field isn't aligned enough to fetch as a memref,
7227 fetch it as a bit field. */
7228 || (mode1 != BLKmode
7229 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7230 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
7232 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7233 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
7234 && ((modifier == EXPAND_CONST_ADDRESS
7235 || modifier == EXPAND_INITIALIZER)
7237 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7238 || (bitpos % BITS_PER_UNIT != 0)))
7239 /* If the type and the field are a constant size and the
7240 size of the type isn't the same size as the bitfield,
7241 we must use bitfield operations. */
7243 && TYPE_SIZE (TREE_TYPE (exp))
7244 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
7245 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7248 enum machine_mode ext_mode = mode;
7250 if (ext_mode == BLKmode
7251 && ! (target != 0 && MEM_P (op0)
7253 && bitpos % BITS_PER_UNIT == 0))
7254 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7256 if (ext_mode == BLKmode)
7259 target = assign_temp (type, 0, 1, 1);
7264 /* In this case, BITPOS must start at a byte boundary and
7265 TARGET, if specified, must be a MEM. */
7266 gcc_assert (MEM_P (op0)
7267 && (!target || MEM_P (target))
7268 && !(bitpos % BITS_PER_UNIT));
7270 emit_block_move (target,
7271 adjust_address (op0, VOIDmode,
7272 bitpos / BITS_PER_UNIT),
7273 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7275 (modifier == EXPAND_STACK_PARM
7276 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7281 op0 = validize_mem (op0);
7283 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
7284 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7286 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7287 (modifier == EXPAND_STACK_PARM
7288 ? NULL_RTX : target),
7289 ext_mode, ext_mode);
7291 /* If the result is a record type and BITSIZE is narrower than
7292 the mode of OP0, an integral mode, and this is a big endian
7293 machine, we must put the field into the high-order bits. */
7294 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7295 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7296 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7297 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7298 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7302 /* If the result type is BLKmode, store the data into a temporary
7303 of the appropriate type, but with the mode corresponding to the
7304 mode for the data we have (op0's mode). It's tempting to make
7305 this a constant type, since we know it's only being stored once,
7306 but that can cause problems if we are taking the address of this
7307 COMPONENT_REF because the MEM of any reference via that address
7308 will have flags corresponding to the type, which will not
7309 necessarily be constant. */
7310 if (mode == BLKmode)
7313 = assign_stack_temp_for_type
7314 (ext_mode, GET_MODE_BITSIZE (ext_mode), 0, type);
7316 emit_move_insn (new, op0);
7317 op0 = copy_rtx (new);
7318 PUT_MODE (op0, BLKmode);
7319 set_mem_attributes (op0, exp, 1);
7325 /* If the result is BLKmode, use that to access the object
7326 now as well.  */
7327 if (mode == BLKmode)
7330 /* Get a reference to just this component. */
7331 if (modifier == EXPAND_CONST_ADDRESS
7332 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7333 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7335 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7337 if (op0 == orig_op0)
7338 op0 = copy_rtx (op0);
7340 set_mem_attributes (op0, exp, 0);
7341 if (REG_P (XEXP (op0, 0)))
7342 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7344 MEM_VOLATILE_P (op0) |= volatilep;
7345 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7346 || modifier == EXPAND_CONST_ADDRESS
7347 || modifier == EXPAND_INITIALIZER)
7349 else if (target == 0)
7350 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7352 convert_move (target, op0, unsignedp);
7357 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
7360 /* Check for a built-in function. */
7361 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7362 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7364 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7366 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7367 == BUILT_IN_FRONTEND)
7368 return lang_hooks.expand_expr (exp, original_target,
7372 return expand_builtin (exp, target, subtarget, tmode, ignore);
7375 return expand_call (exp, target, ignore);
7377 case NON_LVALUE_EXPR:
7380 if (TREE_OPERAND (exp, 0) == error_mark_node)
7383 if (TREE_CODE (type) == UNION_TYPE)
7385 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7387 /* If both input and output are BLKmode, this conversion isn't doing
7388 anything except possibly changing memory attributes.  */
7389 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7391 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7394 result = copy_rtx (result);
7395 set_mem_attributes (result, exp, 0);
7401 if (TYPE_MODE (type) != BLKmode)
7402 target = gen_reg_rtx (TYPE_MODE (type));
7404 target = assign_temp (type, 0, 1, 1);
7408 /* Store data into beginning of memory target. */
7409 store_expr (TREE_OPERAND (exp, 0),
7410 adjust_address (target, TYPE_MODE (valtype), 0),
7411 modifier == EXPAND_STACK_PARM);
7415 gcc_assert (REG_P (target));
7417 /* Store this field into a union of the proper type. */
7418 store_field (target,
7419 MIN ((int_size_in_bytes (TREE_TYPE
7420 (TREE_OPERAND (exp, 0)))
7422 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7423 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7427 /* Return the entire union. */
7431 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7433 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7436 /* If the signedness of the conversion differs and OP0 is
7437 a promoted SUBREG, clear that indication since we now
7438 have to do the proper extension. */
7439 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7440 && GET_CODE (op0) == SUBREG)
7441 SUBREG_PROMOTED_VAR_P (op0) = 0;
7443 return REDUCE_BIT_FIELD (op0);
7446 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7447 if (GET_MODE (op0) == mode)
7450 /* If OP0 is a constant, just convert it into the proper mode. */
7451 else if (CONSTANT_P (op0))
7453 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7454 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7456 if (modifier == EXPAND_INITIALIZER)
7457 op0 = simplify_gen_subreg (mode, op0, inner_mode,
7458 subreg_lowpart_offset (mode,
7461 op0 = convert_modes (mode, inner_mode, op0,
7462 TYPE_UNSIGNED (inner_type));
7465 else if (modifier == EXPAND_INITIALIZER)
7466 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7468 else if (target == 0)
7469 op0 = convert_to_mode (mode, op0,
7470 TYPE_UNSIGNED (TREE_TYPE
7471 (TREE_OPERAND (exp, 0))));
7474 convert_move (target, op0,
7475 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7479 return REDUCE_BIT_FIELD (op0);
7481 case VIEW_CONVERT_EXPR:
7482 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7484 /* If the input and output modes are both the same, we are done.
7485 Otherwise, if neither mode is BLKmode and both are integral and within
7486 a word, we can use gen_lowpart. If neither is true, make sure the
7487 operand is in memory and convert the MEM to the new mode. */
7488 if (TYPE_MODE (type) == GET_MODE (op0))
7490 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7491 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7492 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
7493 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7494 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7495 op0 = gen_lowpart (TYPE_MODE (type), op0);
7496 else if (!MEM_P (op0))
7498 /* If the operand is not a MEM, force it into memory. Since we
7499 are going to be changing the mode of the MEM, don't call
7500 force_const_mem for constants because we don't allow pool
7501 constants to change mode. */
7502 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7504 gcc_assert (!TREE_ADDRESSABLE (exp));
7506 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7508 = assign_stack_temp_for_type
7509 (TYPE_MODE (inner_type),
7510 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7512 emit_move_insn (target, op0);
7516 /* At this point, OP0 is in the correct mode. If the output type is such
7517 that the operand is known to be aligned, indicate that it is.
7518 Otherwise, we need only be concerned about alignment for non-BLKmode
7519 results.  */
7522 op0 = copy_rtx (op0);
7524 if (TYPE_ALIGN_OK (type))
7525 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7526 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7527 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7529 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7530 HOST_WIDE_INT temp_size
7531 = MAX (int_size_in_bytes (inner_type),
7532 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7533 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7534 temp_size, 0, type);
7535 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7537 gcc_assert (!TREE_ADDRESSABLE (exp));
7539 if (GET_MODE (op0) == BLKmode)
7540 emit_block_move (new_with_op0_mode, op0,
7541 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7542 (modifier == EXPAND_STACK_PARM
7543 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7545 emit_move_insn (new_with_op0_mode, op0);
7550 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7556 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
7557 something else, make sure we add the register to the constant and
7558 then to the other thing. This case can occur during strength
7559 reduction and doing it this way will produce better code if the
7560 frame pointer or argument pointer is eliminated.
7562 fold-const.c will ensure that the constant is always in the inner
7563 PLUS_EXPR, so the only case we need to do anything about is if
7564 sp, ap, or fp is our second argument, in which case we must swap
7565 the innermost first argument and our second argument. */
7567 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7568 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7569 && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
7570 && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7571 || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7572 || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7574 tree t = TREE_OPERAND (exp, 1);
7576 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7577 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7580 /* If the result is to be ptr_mode and we are adding an integer to
7581 something, we might be forming a constant. So try to use
7582 plus_constant. If it produces a sum and we can't accept it,
7583 use force_operand. This allows P = &ARR[const] to generate
7584 efficient code on machines where a SYMBOL_REF is not a valid
7585 address.
7587 If this is an EXPAND_SUM call, always return the sum. */
7588 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7589 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7591 if (modifier == EXPAND_STACK_PARM)
7593 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7594 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7595 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7599 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7601 /* Use immed_double_const to ensure that the constant is
7602 truncated according to the mode of OP1, then sign extended
7603 to a HOST_WIDE_INT. Using the constant directly can result
7604 in non-canonical RTL in a 64x32 cross compile. */
7606 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7608 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7609 op1 = plus_constant (op1, INTVAL (constant_part));
7610 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7611 op1 = force_operand (op1, target);
7612 return REDUCE_BIT_FIELD (op1);
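/* Editor's note -- illustrative sketch, not part of the original source.
   On a 64-bit host targeting a 32-bit machine, a CONST_INT must stay
   sign-extended from the target's 32 bits: immed_double_const turns
   e.g. the low bits 0x80000000 into the canonical CONST_INT
   -2147483648 rather than a zero-extended, non-canonical 0x80000000.  */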
7615 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7616 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7617 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7621 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7622 (modifier == EXPAND_INITIALIZER
7623 ? EXPAND_INITIALIZER : EXPAND_SUM));
7624 if (! CONSTANT_P (op0))
7626 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7627 VOIDmode, modifier);
7628 /* Return a PLUS if modifier says it's OK. */
7629 if (modifier == EXPAND_SUM
7630 || modifier == EXPAND_INITIALIZER)
7631 return simplify_gen_binary (PLUS, mode, op0, op1);
7634 /* Use immed_double_const to ensure that the constant is
7635 truncated according to the mode of OP1, then sign extended
7636 to a HOST_WIDE_INT. Using the constant directly can result
7637 in non-canonical RTL in a 64x32 cross compile. */
7639 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7641 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7642 op0 = plus_constant (op0, INTVAL (constant_part));
7643 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7644 op0 = force_operand (op0, target);
7645 return REDUCE_BIT_FIELD (op0);
7649 /* No sense saving up arithmetic to be done
7650 if it's all in the wrong mode to form part of an address.
7651 And force_operand won't know whether to sign-extend or
7652 zero-extend.  */
7653 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7654 || mode != ptr_mode)
7656 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7657 subtarget, &op0, &op1, 0);
7658 if (op0 == const0_rtx)
7660 if (op1 == const0_rtx)
7665 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7666 subtarget, &op0, &op1, modifier);
7667 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7670 /* For initializers, we are allowed to return a MINUS of two
7671 symbolic constants.  Here we handle all cases when both operands
7672 are constant.  */
7673 /* Handle difference of two symbolic constants,
7674 for the sake of an initializer. */
7675 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7676 && really_constant_p (TREE_OPERAND (exp, 0))
7677 && really_constant_p (TREE_OPERAND (exp, 1)))
7679 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7680 NULL_RTX, &op0, &op1, modifier);
7682 /* If the last operand is a CONST_INT, use plus_constant of
7683 the negated constant. Else make the MINUS. */
7684 if (GET_CODE (op1) == CONST_INT)
7685 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
7687 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
7690 /* No sense saving up arithmetic to be done
7691 if it's all in the wrong mode to form part of an address.
7692 And force_operand won't know whether to sign-extend or
7693 zero-extend.  */
7694 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7695 || mode != ptr_mode)
7698 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7699 subtarget, &op0, &op1, modifier);
7701 /* Convert A - const to A + (-const). */
7702 if (GET_CODE (op1) == CONST_INT)
7704 op1 = negate_rtx (mode, op1);
7705 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
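/* Editor's note -- illustrative sketch, not part of the original source.
   E.g. `p - 4' becomes a PLUS: negate_rtx yields (const_int -4), so
   the EXPAND_SUM machinery sees only additions, i.e.
   (plus (reg) (const_int -4)).  */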
7711 /* If the first operand is constant, swap them.
7712 Thus the following special case checks need only
7713 check the second operand. */
7714 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7716 tree t1 = TREE_OPERAND (exp, 0);
7717 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7718 TREE_OPERAND (exp, 1) = t1;
7721 /* Attempt to return something suitable for generating an
7722 indexed address, for machines that support that. */
7724 if (modifier == EXPAND_SUM && mode == ptr_mode
7725 && host_integerp (TREE_OPERAND (exp, 1), 0))
7727 tree exp1 = TREE_OPERAND (exp, 1);
7729 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7733 op0 = force_operand (op0, NULL_RTX);
7735 op0 = copy_to_mode_reg (mode, op0);
7737 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
7738 gen_int_mode (tree_low_cst (exp1, 0),
7739 TYPE_MODE (TREE_TYPE (exp1)))));
7742 if (modifier == EXPAND_STACK_PARM)
7745 /* Check for multiplying things that have been extended
7746 from a narrower type. If this machine supports multiplying
7747 in that narrower type with a result in the desired type,
7748 do it that way, and avoid the explicit type-conversion. */
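/* Editor's note -- illustrative sketch, not part of the original source.
   E.g. `(int) a * (int) b' with `short a, b' on a machine providing
   mulhisi3: rather than extending both operands to SImode and doing an
   SImode multiply, the HImode operands feed the widening multiply
   directly and the explicit conversions disappear.  */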
7749 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7750 && TREE_CODE (type) == INTEGER_TYPE
7751 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7752 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7753 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7754 && int_fits_type_p (TREE_OPERAND (exp, 1),
7755 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7756 /* Don't use a widening multiply if a shift will do. */
7757 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7758 > HOST_BITS_PER_WIDE_INT)
7759 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7761 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7762 && (TYPE_PRECISION (TREE_TYPE
7763 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7764 == TYPE_PRECISION (TREE_TYPE
7766 (TREE_OPERAND (exp, 0), 0))))
7767 /* If both operands are extended, they must either both
7768 be zero-extended or both be sign-extended. */
7769 && (TYPE_UNSIGNED (TREE_TYPE
7770 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7771 == TYPE_UNSIGNED (TREE_TYPE
7773 (TREE_OPERAND (exp, 0), 0)))))))
7775 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
7776 enum machine_mode innermode = TYPE_MODE (op0type);
7777 bool zextend_p = TYPE_UNSIGNED (op0type);
7778 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
7779 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
7781 if (mode == GET_MODE_2XWIDER_MODE (innermode))
7783 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7785 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7786 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7787 TREE_OPERAND (exp, 1),
7788 NULL_RTX, &op0, &op1, 0);
7790 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7791 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7792 NULL_RTX, &op0, &op1, 0);
7795 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7796 && innermode == word_mode)
7799 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7800 NULL_RTX, VOIDmode, 0);
7801 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7802 op1 = convert_modes (innermode, mode,
7803 expand_expr (TREE_OPERAND (exp, 1),
7804 NULL_RTX, VOIDmode, 0),
7807 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7808 NULL_RTX, VOIDmode, 0);
7809 temp = expand_binop (mode, other_optab, op0, op1, target,
7810 unsignedp, OPTAB_LIB_WIDEN);
7811 hipart = gen_highpart (innermode, temp);
7812 htem = expand_mult_highpart_adjust (innermode, hipart,
7816 emit_move_insn (hipart, htem);
7817 return REDUCE_BIT_FIELD (temp);
7821 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7822 subtarget, &op0, &op1, 0);
7823 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
7825 case TRUNC_DIV_EXPR:
7826 case FLOOR_DIV_EXPR:
7828 case ROUND_DIV_EXPR:
7829 case EXACT_DIV_EXPR:
7830 if (modifier == EXPAND_STACK_PARM)
7832 /* Possible optimization: compute the dividend with EXPAND_SUM;
7833 then, if the divisor is constant, we can optimize the case
7834 where some terms of the dividend have coefficients divisible by it.  */
7835 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7836 subtarget, &op0, &op1, 0);
7837 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7842 case TRUNC_MOD_EXPR:
7843 case FLOOR_MOD_EXPR:
7845 case ROUND_MOD_EXPR:
7846 if (modifier == EXPAND_STACK_PARM)
7848 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7849 subtarget, &op0, &op1, 0);
7850 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7852 case FIX_ROUND_EXPR:
7853 case FIX_FLOOR_EXPR:
7855 gcc_unreachable (); /* Not used for C. */
7857 case FIX_TRUNC_EXPR:
7858 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7859 if (target == 0 || modifier == EXPAND_STACK_PARM)
7860 target = gen_reg_rtx (mode);
7861 expand_fix (target, op0, unsignedp);
7865 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7866 if (target == 0 || modifier == EXPAND_STACK_PARM)
7867 target = gen_reg_rtx (mode);
7868 /* expand_float can't figure out what to do if FROM has VOIDmode.
7869 So give it the correct mode. With -O, cse will optimize this. */
7870 if (GET_MODE (op0) == VOIDmode)
7871 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7873 expand_float (target, op0,
7874 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7878 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7879 if (modifier == EXPAND_STACK_PARM)
7881 temp = expand_unop (mode,
7882 optab_for_tree_code (NEGATE_EXPR, type),
7885 return REDUCE_BIT_FIELD (temp);
7888 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7889 if (modifier == EXPAND_STACK_PARM)
7892 /* ABS_EXPR is not valid for complex arguments. */
7893 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7894 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
7896 /* Unsigned abs is simply the operand. Testing here means we don't
7897 risk generating incorrect code below. */
7898 if (TYPE_UNSIGNED (type))
7901 return expand_abs (mode, op0, target, unsignedp,
7902 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7906 target = original_target;
7908 || modifier == EXPAND_STACK_PARM
7909 || (MEM_P (target) && MEM_VOLATILE_P (target))
7910 || GET_MODE (target) != mode
7912 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7913 target = gen_reg_rtx (mode);
7914 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7915 target, &op0, &op1, 0);
7917 /* First try to do it with a special MIN or MAX instruction.
7918 If that does not win, use a conditional jump to select the proper
7919 value.  */
7920 this_optab = optab_for_tree_code (code, type);
7921 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7926 /* At this point, a MEM target is no longer useful; we will get better
7927 code without it.  */
7929 if (! REG_P (target))
7930 target = gen_reg_rtx (mode);
7932 /* If op1 was placed in target, swap op0 and op1. */
7933 if (target != op0 && target == op1)
7940 /* We generate better code and avoid problems with op1 mentioning
7941 target by forcing op1 into a pseudo if it isn't a constant. */
7942 if (! CONSTANT_P (op1))
7943 op1 = force_reg (mode, op1);
7945 #ifdef HAVE_conditional_move
7946 /* Use a conditional move if possible. */
7947 if (can_conditionally_move_p (mode))
7949 enum rtx_code comparison_code;
7952 if (code == MAX_EXPR)
7953 comparison_code = unsignedp ? GEU : GE;
7955 comparison_code = unsignedp ? LEU : LE;
7957 /* ??? Same problem as in expmed.c: emit_conditional_move
7958 forces a stack adjustment via compare_from_rtx, and we
7959 lose the stack adjustment if the sequence we are about
7960 to create is discarded. */
7961 do_pending_stack_adjust ();
7965 /* Try to emit the conditional move. */
7966 insn = emit_conditional_move (target, comparison_code,
7971 /* If we could do the conditional move, emit the sequence,
7972 and return.  */
7975 rtx seq = get_insns ();
7981 /* Otherwise discard the sequence and fall back to code with
7982 a branch.  */
7987 emit_move_insn (target, op0);
7989 temp = gen_label_rtx ();
7991 /* If this mode is an integer too wide to compare properly,
7992 compare word by word. Rely on cse to optimize constant cases. */
7993 if (GET_MODE_CLASS (mode) == MODE_INT
7994 && ! can_compare_p (GE, mode, ccp_jump))
7996 if (code == MAX_EXPR)
7997 do_jump_by_parts_greater_rtx (mode, unsignedp, target, op1,
8000 do_jump_by_parts_greater_rtx (mode, unsignedp, op1, target,
8005 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
8006 unsignedp, mode, NULL_RTX, NULL_RTX, temp);
8008 emit_move_insn (target, op1);
8013 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8014 if (modifier == EXPAND_STACK_PARM)
8016 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8020 /* ??? Can optimize bitwise operations with one arg constant.
8021 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8022 and (a bitwise1 b) bitwise2 b (etc)
8023 but that is probably not worthwhile.  */
8025 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8026 boolean values when we want in all cases to compute both of them. In
8027 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8028 as actual zero-or-1 values and then bitwise anding. In cases where
8029 there cannot be any side effects, better code would be made by
8030 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8031 how to recognize those cases. */
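/* Editor's note -- illustrative sketch, not part of the original source.
   I.e. `p && q' (as TRUTH_AND_EXPR) is expanded below exactly like
   `p & q' on the operands' zero-or-one values, trading the branches
   of TRUTH_ANDIF_EXPR for a single bitwise AND insn.  */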
8033 case TRUTH_AND_EXPR:
8034 code = BIT_AND_EXPR;
8039 code = BIT_IOR_EXPR;
8043 case TRUTH_XOR_EXPR:
8044 code = BIT_XOR_EXPR;
8052 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8054 if (modifier == EXPAND_STACK_PARM)
8056 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8057 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8060 /* Could determine the answer when only additive constants differ. Also,
8061 the addition of one can be handled by changing the condition. */
8068 case UNORDERED_EXPR:
8076 temp = do_store_flag (exp,
8077 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
			    tmode != VOIDmode ? tmode : mode, 0);
      if (temp != 0)
	return temp;

      /* For foo != 0, load foo, and if it is nonzero load 1 instead.  */
      if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
	  && original_target
	  && REG_P (original_target)
	  && (GET_MODE (original_target)
	      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	{
	  temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
			      VOIDmode, 0);

	  /* If temp is constant, we can just compute the result.  */
	  if (GET_CODE (temp) == CONST_INT)
	    {
	      if (INTVAL (temp) != 0)
		emit_move_insn (target, const1_rtx);
	      else
		emit_move_insn (target, const0_rtx);

	      return target;
	    }

	  if (temp != original_target)
	    {
	      enum machine_mode mode1 = GET_MODE (temp);
	      if (mode1 == VOIDmode)
		mode1 = tmode != VOIDmode ? tmode : mode;

	      temp = copy_to_mode_reg (mode1, temp);
	    }
8112 op1 = gen_label_rtx ();
8113 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8114 GET_MODE (temp), unsignedp, op1);
	  emit_move_insn (temp, const1_rtx);
	  emit_label (op1);
	  return temp;
	}

      /* If no set-flag instruction, must generate a conditional store
	 into a temporary variable.  Drop through and handle this
	 like && and ||.  */

      if (! ignore
	  && (target == 0
	      || modifier == EXPAND_STACK_PARM
8127 || ! safe_from_p (target, exp, 1)
8128 /* Make sure we don't have a hard reg (such as function's return
8129 value) live across basic blocks, if not optimizing. */
8130 || (!optimize && REG_P (target)
8131 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8132 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
      if (target)
	emit_move_insn (target, const0_rtx);

      op1 = gen_label_rtx ();
      jumpifnot (exp, op1);

      if (target)
	emit_move_insn (target, const1_rtx);

      emit_label (op1);
      return ignore ? const0_rtx : target;

    case TRUTH_NOT_EXPR:
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
8149 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8150 /* The parser is careful to generate TRUTH_NOT_EXPR
8151 only with operands that are always zero or one. */
8152 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
			   target, 1, OPTAB_LIB_WIDEN);
      gcc_assert (temp);
      return temp;

    case STATEMENT_LIST:
      {
	tree_stmt_iterator iter;

	gcc_assert (ignore);

	for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
	  expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
      }
      return const0_rtx;

    case COND_EXPR:
8169 /* A COND_EXPR with its type being VOID_TYPE represents a
8170 conditional jump and is handled in
8171 expand_gimple_cond_expr. */
8172 gcc_assert (!VOID_TYPE_P (TREE_TYPE (exp)));
8174 /* Note that COND_EXPRs whose type is a structure or union
8175 are required to be constructed to contain assignments of
8176 a temporary variable, so that we can evaluate them here
8177 for side effect only. If type is void, we must do likewise. */
8179 gcc_assert (!TREE_ADDRESSABLE (type)
8181 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node
8182 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node);
8184 /* If we are not to produce a result, we have no target. Otherwise,
8185 if a target was specified use it; it will not be used as an
	   intermediate target unless it is safe.  If no target, use a
	   temporary.  */

	if (modifier != EXPAND_STACK_PARM
	    && original_target
	    && safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
	    && GET_MODE (original_target) == mode
#ifdef HAVE_conditional_move
	    && (! can_conditionally_move_p (mode)
		|| REG_P (original_target))
#endif
	    && !MEM_P (original_target))
	  temp = original_target;
	else
	  temp = assign_temp (type, 0, 0, 1);

	do_pending_stack_adjust ();
	NO_DEFER_POP;
8204 op0 = gen_label_rtx ();
8205 op1 = gen_label_rtx ();
8206 jumpifnot (TREE_OPERAND (exp, 0), op0);
8207 store_expr (TREE_OPERAND (exp, 1), temp,
8208 modifier == EXPAND_STACK_PARM);
	emit_jump_insn (gen_jump (op1));
	emit_barrier ();
	emit_label (op0);
	store_expr (TREE_OPERAND (exp, 2), temp,
		    modifier == EXPAND_STACK_PARM);

	emit_label (op1);
	OK_DEFER_POP;
	return temp;
      }

    case VEC_COND_EXPR:
      target = expand_vec_cond_expr (exp, target);
      return target;

    case MODIFY_EXPR:
      {
	tree lhs = TREE_OPERAND (exp, 0);
8227 tree rhs = TREE_OPERAND (exp, 1);
8229 gcc_assert (ignore);
8231 /* Check for |= or &= of a bitfield of size one into another bitfield
8232 of size 1. In this case, (unless we need the result of the
8233 assignment) we can do this more efficiently with a
8234 test followed by an assignment, if necessary.
	   ??? At this point, we can't get a BIT_FIELD_REF here.  But if
	   things change so we do, this code should be enhanced to
	   support it.  */
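	/* For example, "b1 |= b2" becomes "if (b2) b1 = 1;" and
	   "b1 &= b2" becomes "if (! b2) b1 = 0;": a single test and
	   conditional store rather than a read-modify-write of the
	   bitfield.  */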
8239 if (TREE_CODE (lhs) == COMPONENT_REF
8240 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8241 || TREE_CODE (rhs) == BIT_AND_EXPR)
8242 && TREE_OPERAND (rhs, 0) == lhs
8243 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8244 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8245 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
	  {
	    rtx label = gen_label_rtx ();

8249 do_jump (TREE_OPERAND (rhs, 1),
8250 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8251 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8252 expand_assignment (lhs, convert (TREE_TYPE (rhs),
					     (TREE_CODE (rhs) == BIT_IOR_EXPR
					      ? integer_one_node
					      : integer_zero_node)));
	    do_pending_stack_adjust ();
	    emit_label (label);
	    return const0_rtx;
	  }

	expand_assignment (lhs, rhs);
	return const0_rtx;
      }

    case RETURN_EXPR:
      if (!TREE_OPERAND (exp, 0))
	expand_null_return ();
      else
	expand_return (TREE_OPERAND (exp, 0));
      return const0_rtx;

    case ADDR_EXPR:
      return expand_expr_addr_expr (exp, target, tmode, modifier);

    case COMPLEX_EXPR:
      /* Get the rtx code of the operands.  */
8278 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8279 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
      if (!target)
	target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

      /* Move the real (op0) and imaginary (op1) parts to their location.  */
      write_complex_part (target, op0, false);
      write_complex_part (target, op1, true);

      return target;

    case REALPART_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
      return read_complex_part (op0, false);

    case IMAGPART_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
      return read_complex_part (op0, true);

    case RESX_EXPR:
      expand_resx_expr (exp);
      return const0_rtx;
    case TRY_CATCH_EXPR:
    case CATCH_EXPR:
    case EH_FILTER_EXPR:
    case TRY_FINALLY_EXPR:
      /* Lowered by tree-eh.c.  */
      gcc_unreachable ();
8309 case WITH_CLEANUP_EXPR:
8310 case CLEANUP_POINT_EXPR:
8312 case CASE_LABEL_EXPR:
8318 case PREINCREMENT_EXPR:
8319 case PREDECREMENT_EXPR:
8320 case POSTINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case LOOP_EXPR:
    case EXIT_EXPR:
8324 case TRUTH_ANDIF_EXPR:
8325 case TRUTH_ORIF_EXPR:
      /* Lowered by gimplify.c.  */
      gcc_unreachable ();

    case EXC_PTR_EXPR:
      return get_exception_pointer (cfun);

    case FILTER_EXPR:
      return get_exception_filter (cfun);

    case FDESC_EXPR:
      /* Function descriptors are not valid except as
	 initialization constants, and should not be expanded.  */
      gcc_unreachable ();

    case SWITCH_EXPR:
      expand_case (exp);
      return const0_rtx;

    case LABEL_EXPR:
      expand_label (TREE_OPERAND (exp, 0));
      return const0_rtx;

    case ASM_EXPR:
      expand_asm_expr (exp);
      return const0_rtx;
8352 case WITH_SIZE_EXPR:
8353 /* WITH_SIZE_EXPR expands to its first argument. The caller should
8354 have pulled out the size to use in whatever context it needed. */
      return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode,
			       modifier, alt_rtl);

    case REALIGN_LOAD_EXPR:
      {
	tree oprnd0 = TREE_OPERAND (exp, 0);
	tree oprnd1 = TREE_OPERAND (exp, 1);
	tree oprnd2 = TREE_OPERAND (exp, 2);
	rtx op2;

	this_optab = optab_for_tree_code (code, type);
	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
	op2 = expand_expr (oprnd2, NULL_RTX, VOIDmode, 0);
	temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
				  target, unsignedp);
	gcc_assert (temp);
	return temp;
      }
8374 case REDUC_MAX_EXPR:
8375 case REDUC_MIN_EXPR:
8376 case REDUC_PLUS_EXPR:
      {
	op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
	this_optab = optab_for_tree_code (code, type);
	temp = expand_unop (mode, this_optab, op0, target, unsignedp);
	gcc_assert (temp);
	return temp;
      }

    case VEC_LSHIFT_EXPR:
    case VEC_RSHIFT_EXPR:
      {
	target = expand_vec_shift_expr (exp, target);
	return target;
      }

    default:
      return lang_hooks.expand_expr (exp, original_target, tmode,
				     modifier, alt_rtl);
    }

  /* Here to do an ordinary binary operator.  */
 binop:
  expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		   subtarget, &op0, &op1, 0);
 binop2:
  this_optab = optab_for_tree_code (code, type);
 binop3:
  if (modifier == EXPAND_STACK_PARM)
    target = 0;
8406 temp = expand_binop (mode, this_optab, op0, op1, target,
8407 unsignedp, OPTAB_LIB_WIDEN);
  gcc_assert (temp);
  return REDUCE_BIT_FIELD (temp);
}
8411 #undef REDUCE_BIT_FIELD
8413 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
8414 signedness of TYPE), possibly returning the result in TARGET. */
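/* For example, assuming a 32-bit SImode value: reducing it to a 3-bit
   unsigned field ANDs it with the mask 0x7, while reducing it to a
   3-bit signed field shifts it left by 29 and then arithmetic-shifts
   it right by 29, sign-extending from bit 2.  */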
static rtx
reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
{
  HOST_WIDE_INT prec = TYPE_PRECISION (type);
  if (target && GET_MODE (target) != GET_MODE (exp))
    target = 0;
  if (TYPE_UNSIGNED (type))
    {
      rtx mask;
      if (prec < HOST_BITS_PER_WIDE_INT)
	mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
				   GET_MODE (exp));
      else
	mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
				   ((unsigned HOST_WIDE_INT) 1
				    << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
				   GET_MODE (exp));
      return expand_and (GET_MODE (exp), exp, mask, target);
    }
  else
    {
      tree count = build_int_cst (NULL_TREE,
				  GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
      exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
      return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
    }
}
8443 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
8444 when applied to the address of EXP produces an address known to be
8445 aligned more than BIGGEST_ALIGNMENT. */
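/* Such an offset typically arises from alignment code of the form
   (- (int) &exp) & (align - 1), with align a power of 2 larger than
   BIGGEST_ALIGNMENT: a BIT_AND_EXPR of a NEGATE_EXPR of EXP's address,
   which is exactly the shape pattern-matched below.  */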
static int
is_aligning_offset (tree offset, tree exp)
{
8450 /* Strip off any conversions. */
8451 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8452 || TREE_CODE (offset) == NOP_EXPR
8453 || TREE_CODE (offset) == CONVERT_EXPR)
8454 offset = TREE_OPERAND (offset, 0);
  /* We must now have a BIT_AND_EXPR with a constant that is one less than
     a power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
  if (TREE_CODE (offset) != BIT_AND_EXPR
      || !host_integerp (TREE_OPERAND (offset, 1), 1)
      || compare_tree_int (TREE_OPERAND (offset, 1),
			   BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
      || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
    return 0;
8465 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
8466 It must be NEGATE_EXPR. Then strip any more conversions. */
8467 offset = TREE_OPERAND (offset, 0);
8468 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8469 || TREE_CODE (offset) == NOP_EXPR
8470 || TREE_CODE (offset) == CONVERT_EXPR)
8471 offset = TREE_OPERAND (offset, 0);
  if (TREE_CODE (offset) != NEGATE_EXPR)
    return 0;
8476 offset = TREE_OPERAND (offset, 0);
8477 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8478 || TREE_CODE (offset) == NOP_EXPR
8479 || TREE_CODE (offset) == CONVERT_EXPR)
8480 offset = TREE_OPERAND (offset, 0);
8482 /* This must now be the address of EXP. */
  return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
}

/* Return the tree node if ARG corresponds to a string constant, or zero
   if it doesn't.  If we return nonzero, set *PTR_OFFSET to the offset
8488 in bytes within the string that ARG is accessing. The type of the
8489 offset will be `sizetype'. */
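/* For example, given the argument &"hello"[2], the STRING_CST "hello"
   is returned and *PTR_OFFSET is set to 2.  */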
tree
string_constant (tree arg, tree *ptr_offset)
{
  tree array, offset;

  if (TREE_CODE (arg) == ADDR_EXPR)
    {
      if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
	{
	  *ptr_offset = size_zero_node;
	  return TREE_OPERAND (arg, 0);
	}
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
	{
	  array = TREE_OPERAND (arg, 0);
	  offset = size_zero_node;
	}
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
	{
	  array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
	  if (TREE_CODE (array) != STRING_CST
	      && TREE_CODE (array) != VAR_DECL)
	    return 0;
	}
      else
	return 0;
    }
  else if (TREE_CODE (arg) == PLUS_EXPR)
    {
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);
8528 if (TREE_CODE (arg0) == ADDR_EXPR
8529 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
8530 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
	{
	  array = TREE_OPERAND (arg0, 0);
	  offset = arg1;
	}
8535 else if (TREE_CODE (arg1) == ADDR_EXPR
8536 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
8537 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
	{
	  array = TREE_OPERAND (arg1, 0);
	  offset = arg0;
	}
      else
	return 0;
    }
  else
    return 0;

  if (TREE_CODE (array) == STRING_CST)
    {
      *ptr_offset = convert (sizetype, offset);
      return array;
    }
  else if (TREE_CODE (array) == VAR_DECL)
    {
      int length;

      /* Variables initialized to string literals can be handled too.  */
      if (DECL_INITIAL (array) == NULL_TREE
	  || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
	return 0;

      /* Only handle them if they are read-only, non-volatile,
	 and bind locally.  */
8563 if (! TREE_READONLY (array)
8564 || TREE_SIDE_EFFECTS (array)
	  || ! targetm.binds_local_p (array))
	return 0;

      /* Avoid const char foo[4] = "abcde";  */
      if (DECL_SIZE_UNIT (array) == NULL_TREE
	  || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
	  || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
	  || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
	return 0;

      /* If the variable is bigger than the string literal, OFFSET must
	 be constant and inside the bounds of the string literal.  */
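      /* E.g. for a variable "const char buf[16]" initialized with
	 "abc" (literal length 4, counting the NUL), only a constant
	 offset below 4 is acceptable; anything else could refer to the
	 zero-filled tail of BUF rather than to the literal.  */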
8577 offset = convert (sizetype, offset);
8578 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
8579 && (! host_integerp (offset, 1)
	      || compare_tree_int (offset, length) >= 0))
	return 0;

      *ptr_offset = offset;
      return DECL_INITIAL (array);
    }

  return 0;
}
8590 /* Generate code to calculate EXP using a store-flag instruction
8591 and return an rtx for the result. EXP is either a comparison
8592 or a TRUTH_NOT_EXPR whose operand is a comparison.
8594 If TARGET is nonzero, store the result there if convenient.
   If ONLY_CHEAP is nonzero, only do this if it is likely to be very
   cheap.
8599 Return zero if there is no suitable set-flag instruction
8600 available on this machine.
8602 Once expand_expr has been called on the arguments of the comparison,
8603 we are committed to doing the store flag, since it is not safe to
8604 re-evaluate the expression. We emit the store-flag insn by calling
8605 emit_store_flag, but only expand the arguments if we have a reason
8606 to believe that emit_store_flag will be successful. If we think that
8607 it will, but it isn't, we have to simulate the store-flag with a
8608 set/jump/set sequence. */
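/* The simulated sequence first stores 1 in TARGET (0 when inverting),
   then emits a conditional branch around a store of the opposite
   constant, so TARGET holds the right value on both paths; see the
   code at the end of this function.  */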
static rtx
do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;
8624 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
8625 result at the end. We can't simply invert the test since it would
8626 have already been inverted if it were valid. This case occurs for
8627 some floating-point comparisons. */
8629 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
8630 invert = 1, exp = TREE_OPERAND (exp, 0);
8632 arg0 = TREE_OPERAND (exp, 0);
8633 arg1 = TREE_OPERAND (exp, 1);
8635 /* Don't crash if the comparison was erroneous. */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;
8639 type = TREE_TYPE (arg0);
8640 operand_mode = TYPE_MODE (type);
8641 unsignedp = TYPE_UNSIGNED (type);
8643 /* We won't bother with BLKmode store-flag operations because it would mean
8644 passing a lot of information to emit_store_flag. */
  if (operand_mode == BLKmode)
    return 0;
8648 /* We won't bother with store-flag operations involving function pointers
8649 when function pointers must be canonicalized before comparisons. */
8650 #ifdef HAVE_canonicalize_funcptr_for_compare
8651 if (HAVE_canonicalize_funcptr_for_compare
8652 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
8653 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8655 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
8656 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
		  == FUNCTION_TYPE))))
    return 0;
#endif
8664 /* Get the rtx comparison code to use. We know that EXP is a comparison
8665 operation of some type. Some comparisons against 1 and -1 can be
8666 converted to comparisons with zero. Do so here so that the tests
8667 below will be aware that we have a comparison with zero. These
8668 tests will not catch constants in the first operand, but constants
8669 are rarely passed as the first operand. */
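  /* For example, for signed operands "x < 1" becomes "x <= 0" and
     "x > -1" becomes "x >= 0", while for unsigned operands "x >= 1"
     becomes "x > 0", since an unsigned value is never negative.  */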
  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
	code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = LT;
      else
	code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = GE;
      else
	code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
	code = unsignedp ? GEU : GE;
      break;
8704 case UNORDERED_EXPR:
8733 /* Put a constant second. */
8734 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }
8740 /* If this is an equality or inequality test of a single bit, we can
8741 do this by shifting the bit being tested to the low-order bit and
8742 masking the result with the constant 1. If the condition was EQ,
8743 we xor it with 1. This does not require an scc insn and is faster
8744 than an scc insn even if we have it.
8746 The code to make this transformation was moved into fold_single_bit_test,
8747 so we just call into the folder and expand its result. */
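  /* For instance, "(x & 4) != 0" is expanded as "(x >> 2) & 1", and
     the EQ form "(x & 4) == 0" as the same shift and mask followed by
     an XOR with 1.  */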
8749 if ((code == NE || code == EQ)
8750 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
8751 && integer_pow2p (TREE_OPERAND (arg0, 1)))
8753 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
      return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
						arg0, arg1, type),
			  target, VOIDmode, EXPAND_NORMAL);
    }

  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (code, operand_mode, ccp_store_flag))
    return 0;

  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
    {
      /* We can only do this if it is one of the special cases that
	 can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
	  || (! only_cheap && code == GE && integer_zerop (arg1)))
	;
8772 else if (! only_cheap && (code == NE || code == EQ)
8773 && TREE_CODE (type) != REAL_TYPE
8774 && ((abs_optab->handlers[(int) operand_mode].insn_code
8775 != CODE_FOR_nothing)
8776 || (ffs_optab->handlers[(int) operand_mode].insn_code
		       != CODE_FOR_nothing)))
	;
      else
	return 0;
    }

  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode)
    subtarget = 0;

  expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  result = emit_store_flag (target, code, op0, op1,
			    operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
	result = expand_binop (mode, xor_optab, result, const1_rtx,
			       result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (!REG_P (target)
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));
8808 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
8809 result = compare_from_rtx (op0, op1, code, unsignedp,
8810 operand_mode, NULL_RTX);
8811 if (GET_CODE (result) == CONST_INT)
8812 return (((result == const0_rtx && ! invert)
8813 || (result != const0_rtx && invert))
8814 ? const0_rtx : const1_rtx);
8816 /* The code of RESULT may not match CODE if compare_from_rtx
8817 decided to swap its operands and reverse the original code.
8819 We know that compare_from_rtx returns either a CONST_INT or
8820 a new comparison code, so it is safe to just extract the
8821 code from RESULT. */
8822 code = GET_CODE (result);
8824 label = gen_label_rtx ();
8825 gcc_assert (bcc_gen_fctn[(int) code]);
8827 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}

/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif
8842 /* If the machine does not have a case insn that compares the bounds,
8843 this means extra overhead for dispatch tables, which raises the
8844 threshold for using them. */
8845 #ifndef CASE_VALUES_THRESHOLD
8846 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
8847 #endif /* CASE_VALUES_THRESHOLD */
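/* Thus by default a switch needs at least 4 case values to get a
   dispatch table when a casesi pattern exists, and at least 5 when it
   does not.  */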
unsigned int
case_values_threshold (void)
{
  return CASE_VALUES_THRESHOLD;
}

/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */
int
try_casesi (tree index_type, tree index_expr, tree minval, tree range,
	    rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
{
8861 enum machine_mode index_mode = SImode;
8862 int index_bits = GET_MODE_BITSIZE (index_mode);
8863 rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;
8869 /* Convert the index to SImode. */
8870 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
8873 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
8875 /* We must handle the endpoints in the original mode. */
8876 index_expr = build2 (MINUS_EXPR, index_type,
8877 index_expr, minval);
8878 minval = integer_zero_node;
8879 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
8880 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
8881 omode, 1, default_label);
8882 /* Now we can safely truncate. */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
	{
	  index_expr = convert (lang_hooks.types.type_for_size
				(index_bits, 0), index_expr);
	  index_type = TREE_TYPE (index_expr);
	}

      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
    }
8897 do_pending_stack_adjust ();
8899 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
8902 index = copy_to_mode_reg (op_mode, index);
8904 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
8906 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
8907 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
8908 op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
8911 op1 = copy_to_mode_reg (op_mode, op1);
8913 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
8915 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
8916 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
8917 op2, TYPE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
8920 op2 = copy_to_mode_reg (op_mode, op2);
8922 emit_jump_insn (gen_casesi (index, op1, op2,
			      table_label, default_label));
  return 1;
}
8927 /* Attempt to generate a tablejump instruction; same concept. */
8928 #ifndef HAVE_tablejump
8929 #define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif
8933 /* Subroutine of the next function.
8935 INDEX is the value being switched on, with the lowest value
8936 in the table already subtracted.
8937 MODE is its expected mode (needed if INDEX is constant).
8938 RANGE is the length of the jump table.
8939 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
8941 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
8942 index value is out of range. */
static void
do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
	      rtx default_label)
{
  rtx temp, vector;
8950 if (INTVAL (range) > cfun->max_jumptable_ents)
8951 cfun->max_jumptable_ents = INTVAL (range);
8953 /* Do an unsigned comparison (in the proper mode) between the index
8954 expression and the value which represents the length of the range.
8955 Since we just finished subtracting the lower bound of the range
8956 from the index expression, this comparison allows us to simultaneously
8957 check that the original index expression value is both greater than
8958 or equal to the minimum value of the range and less than or equal to
8959 the maximum value of the range. */
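/* For example, if the case values span [10, 14], RANGE is 4 and 10 has
   already been subtracted from INDEX; an original index of 9 wraps
   around to the largest unsigned value, so the single unsigned GTU
   test below also rejects values under the minimum.  */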
  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
			   default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);
8969 /* Don't let a MEM slip through, because then INDEX that comes
8970 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
8971 and break_out_memory_refs will go to work on it and mess it up. */
8972 #ifdef PIC_CASE_VECTOR_ADDRESS
8973 if (flag_pic && !REG_P (index))
    index = copy_to_mode_reg (Pmode, index);
#endif
8977 /* If flag_force_addr were to affect this address
8978 it could interfere with the tricky assumptions made
8979 about addresses that contain label-refs,
8980 which may be valid only very near the tablejump itself. */
8981 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
8982 GET_MODE_SIZE, because this indicates how large insns are. The other
8983 uses should all be Pmode, because they are addresses. This code
8984 could fail if addresses and insns are not the same size. */
8985 index = gen_rtx_PLUS (Pmode,
8986 gen_rtx_MULT (Pmode, index,
8987 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
8988 gen_rtx_LABEL_REF (Pmode, table_label));
8989 #ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
8994 index = memory_address_noforce (CASE_VECTOR_MODE, index);
8995 temp = gen_reg_rtx (CASE_VECTOR_MODE);
8996 vector = gen_const_mem (CASE_VECTOR_MODE, index);
8997 convert_move (temp, vector, 0);
8999 emit_jump_insn (gen_tablejump (temp, table_label));
9001 /* If we are generating PIC code or if the table is PC-relative, the
9002 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}

int
try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
	       rtx table_label, rtx default_label)
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;
9016 index_expr = fold_build2 (MINUS_EXPR, index_type,
9017 convert (index_type, index_expr),
9018 convert (index_type, minval));
9019 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9020 do_pending_stack_adjust ();
9022 do_tablejump (index, TYPE_MODE (index_type),
9023 convert_modes (TYPE_MODE (index_type),
9024 TYPE_MODE (TREE_TYPE (range)),
			       expand_expr (range, NULL_RTX,
					    VOIDmode, 0),
			       TYPE_UNSIGNED (TREE_TYPE (range))),
		table_label, default_label);
  return 1;
}
9032 /* Nonzero if the mode is a valid vector mode for this architecture.
9033 This returns nonzero even if there is no hardware support for the
9034 vector mode, but we can emulate with narrower modes. */
int
vector_mode_valid_p (enum machine_mode mode)
{
  enum mode_class class = GET_MODE_CLASS (mode);
  enum machine_mode innermode;

  /* Doh!  What's going on?  */
  if (class != MODE_VECTOR_INT
      && class != MODE_VECTOR_FLOAT)
    return 0;

  /* Hardware support.  Woo hoo!  */
  if (targetm.vector_mode_supported_p (mode))
    return 1;
9051 innermode = GET_MODE_INNER (mode);
9053 /* We should probably return 1 if requesting V4DI and we have no DI,
9054 but we have V2DI, but this is probably very unlikely. */
9056 /* If we have support for the inner mode, we can safely emulate it.
     We may not have V2DI, but we can emulate with a pair of DIs.  */
  return targetm.scalar_mode_supported_p (innermode);
}

/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */
static rtx
const_vector_from_tree (tree exp)
{
  rtvec v;
  int units, i;
  tree link, elt;
  enum machine_mode inner, mode;
9070 mode = TYPE_MODE (TREE_TYPE (exp));
9072 if (initializer_zerop (exp))
9073 return CONST0_RTX (mode);
9075 units = GET_MODE_NUNITS (mode);
9076 inner = GET_MODE_INNER (mode);
9078 v = rtvec_alloc (units);
9080 link = TREE_VECTOR_CST_ELTS (exp);
9081 for (i = 0; link; link = TREE_CHAIN (link), ++i)
    {
      elt = TREE_VALUE (link);

      if (TREE_CODE (elt) == REAL_CST)
	RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
							 inner);
      else
	RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
					       TREE_INT_CST_HIGH (elt),
					       inner);
    }
9094 /* Initialize remaining elements to 0. */
9095 for (; i < units; ++i)
9096 RTVEC_ELT (v, i) = CONST0_RTX (inner);
  return gen_rtx_CONST_VECTOR (mode, v);
}
9100 #include "gt-expr.h"