/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "typeclass.h"
#include "langhooks.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "tree-flow.h"
#include "diagnostic.h"

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
};

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */
struct store_by_pieces
{
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
};

static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
						     unsigned int,
						     unsigned int);
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
			      struct move_by_pieces *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
			       struct store_by_pieces *);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
				     HOST_WIDE_INT, enum machine_mode,
				     tree, tree, int, alias_set_type);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
			tree, tree, alias_set_type, bool);

static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);

static int is_aligning_offset (const_tree, const_tree);
static void expand_operands (tree, tree, rtx, rtx*, rtx*,
			     enum expand_modifier);
static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
static rtx do_store_flag (tree, rtx, enum machine_mode, int);
static void emit_single_push_insn (enum machine_mode, rtx, tree);
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
static rtx const_vector_from_tree (tree);
static void write_complex_part (rtx, rtx, bool);

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO)
#endif
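/* Usage sketch (illustrative, mirroring the caller later in this file):
   a block-copy expander consults MOVE_BY_PIECES_P before falling back
   to a movmem pattern or a libcall, roughly

     if (GET_CODE (size) == CONST_INT
	 && MOVE_BY_PIECES_P (INTVAL (size), align))
       move_by_pieces (x, y, INTVAL (size), align, 0);

   so the copy is expanded inline only when the estimated insn count
   beats MOVE_RATIO.  */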
/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) CLEAR_RATIO)
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero.  */
#ifndef SET_BY_PIECES_P
#define SET_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) SET_RATIO)
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO)
#endif
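/* Worked illustration: the three *_BY_PIECES_P variants above share the
   same insn-count estimate and differ only in the ratio they compare
   against.  For instance, clearing a 16-byte block with 32-bit
   alignment is expanded inline only when

     move_by_pieces_ninsns (16, 32, STORE_MAX_PIECES + 1)
       < (unsigned int) CLEAR_RATIO

   holds for the target; the same size/alignment pair may still go
   through a memset libcall if CLEAR_RATIO is small.  */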
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movmem_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block sets.  */
enum insn_code setmem_optab[NUM_MACHINE_MODES];

/* These arrays record the insn_code of three different kinds of insns
   to perform block compares.  */
enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
enum insn_code cmpstrn_optab[NUM_MACHINE_MODES];
enum insn_code cmpmem_optab[NUM_MACHINE_MODES];

/* Synchronization primitives.  */
enum insn_code sync_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_compare_and_swap[NUM_MACHINE_MODES];
enum insn_code sync_compare_and_swap_cc[NUM_MACHINE_MODES];
enum insn_code sync_lock_test_and_set[NUM_MACHINE_MODES];
enum insn_code sync_lock_release[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
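/* Illustrative override (hypothetical target code, not from this file):
   a port whose hardware handles misaligned loads cheaply can define

     #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) 0

   in its target header, while strict-alignment ports keep the default
   above and thereby steer the by-pieces code toward aligned modes.  */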
/* This is run to set up which modes can be used
   directly in memory and to initialize the block move optab.  It is run
   at the beginning of compilation and when the target is reinitialized.  */

void
init_expr_target (void)
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;
  int regno;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */
      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    SET_REGNO (reg, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
	   srcmode = GET_MODE_WIDER_MODE (srcmode))
	{
	  enum insn_code ic;

	  ic = can_extend_p (mode, srcmode, 0);
	  if (ic == CODE_FOR_nothing)
	    continue;

	  PUT_MODE (mem, srcmode);

	  if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
	    float_extend_from_mem[mode][srcmode] = true;
	}
    }
}

/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  memset (&crtl->expr, 0, sizeof (crtl->expr));
}

/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating, or both may be
   fixed-point.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (rtx to, rtx from, int unsignedp)
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = SCALAR_FLOAT_MODE_P (to_mode);
  int from_real = SCALAR_FLOAT_MODE_P (from_mode);
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
			      : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));

  gcc_assert (to_real == from_real);
  gcc_assert (to_mode != BLKmode);
  gcc_assert (from_mode != BLKmode);

  /* If the source and destination are already the same, then there's
     nothing to do.  */
  if (to == from)
    return;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));

      if (VECTOR_MODE_P (to_mode))
	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }

  if (to_real)
    {
      rtx value, insns;
      convert_optab tab;

      gcc_assert ((GET_MODE_PRECISION (from_mode)
		   != GET_MODE_PRECISION (to_mode))
		  || (DECIMAL_FLOAT_MODE_P (from_mode)
		      != DECIMAL_FLOAT_MODE_P (to_mode)));

      if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
	/* Conversion between decimal float and binary float, same size.  */
	tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
      else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
	tab = sext_optab;
      else
	tab = trunc_optab;

      /* Try converting directly if the insn is supported.  */

      code = convert_optab_handler (tab, to_mode, from_mode)->insn_code;
      if (code != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from,
			  tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
	  return;
	}

      /* Otherwise use a libcall.  */
      libcall = convert_optab_libfunc (tab, to_mode, from_mode);

      /* Is this conversion implemented yet?  */
      gcc_assert (libcall);

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
				       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value,
			  tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
								       from)
			  : gen_rtx_FLOAT_EXTEND (to_mode, from));
      return;
    }

  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);

      gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)->insn_code
		  != CODE_FOR_nothing);

      if (full_mode != from_mode)
	from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode)->insn_code,
		      to, from, UNKNOWN);
      return;
    }

  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
    {
      rtx new_from;
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);

      gcc_assert (convert_optab_handler (sext_optab, full_mode, from_mode)->insn_code
		  != CODE_FOR_nothing);

      if (to_mode == full_mode)
	{
	  emit_unop_insn (convert_optab_handler (sext_optab, full_mode, from_mode)->insn_code,
			  to, from, UNKNOWN);
	  return;
	}

      new_from = gen_reg_rtx (full_mode);
      emit_unop_insn (convert_optab_handler (sext_optab, full_mode, from_mode)->insn_code,
		      new_from, from, UNKNOWN);

      /* else proceed to integer conversions below.  */
      from_mode = full_mode;
      from = new_from;
    }

  /* Make sure both are fixed-point modes or both are not.  */
  gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
	      ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
  if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
    {
      /* If we widen from_mode to to_mode and they are in the same class,
	 we won't saturate the result.
	 Otherwise, always saturate the result to play safe.  */
      if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
	  && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
	expand_fixed_convert (to, from, 0, 0);
      else
	expand_fixed_convert (to, from, 0, 1);
      return;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  rtx word_to = gen_reg_rtx (word_mode);
	  if (reg_overlap_mentioned_p (to, from))
	    from = force_reg (from_mode, from);
	  convert_move (word_to, from, unsignedp);
	  emit_unop_insn (code, to, word_to, equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  gcc_assert (subword);

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode,
					     copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;
	  rtx tmp;
	  tree shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_cst (NULL_TREE,
					GET_MODE_BITSIZE (to_mode)
					- GET_MODE_BITSIZE (from_mode));
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }

  /* Support special truncate insns for certain modes.  */
  if (convert_optab_handler (trunc_optab, to_mode, from_mode)->insn_code != CODE_FOR_nothing)
    {
      emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode)->insn_code,
		      to, from, UNKNOWN);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  gcc_unreachable ();
}

/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.  */

rtx
convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
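/* Usage sketch (illustrative; NARROW is a hypothetical QImode rtx):
   widening with explicit zero-extension reduces to

     rtx wide = convert_to_mode (SImode, narrow, 1);

   which either refers to NARROW in place via gen_lowpart or emits a
   conversion into a fresh pseudo, as convert_modes below decides.  */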
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */

rtx
convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (REG_P (x)
			  && (! HARD_REGISTER_P (x)
			      || HARD_REGNO_MODE_OK (REGNO (x), mode))
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return gen_int_mode (val, mode);
	}

      return gen_lowpart (mode, x);
    }

  /* Converting from integer constant into mode is always equivalent to a
     subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    {
      gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
      return simplify_gen_subreg (mode, x, oldmode, 0);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}

/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
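/* Worked example (host-dependent, for illustration): with a 64-bit
   HOST_WIDE_INT, 2 * sizeof (HOST_WIDE_INT) is 16 bytes, so a target
   defining MOVE_MAX_PIECES as 8 gets STORE_MAX_PIECES == 8; the
   immediate-constant limit only bites when MOVE_MAX_PIECES exceeds
   the bytes representable in two host words.  */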
/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

int
can_move_by_pieces (unsigned HOST_WIDE_INT len,
		    unsigned int align ATTRIBUTE_UNUSED)
{
  return MOVE_BY_PIECES_P (len, align);
}
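/* Usage sketch (hypothetical caller): front ends and other expanders
   can ask

     if (can_move_by_pieces (INTVAL (size), MEM_ALIGN (dst_mem)))
       ...

   before committing to an inline copy, instead of duplicating the
   MOVE_BY_PIECES_P cost test themselves.  */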
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */

rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
		unsigned int align, int endp)
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);
  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align, max_size) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
	   tmode != VOIDmode;
	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
	    || SLOW_UNALIGNED_ACCESS (tmode, align))
	  break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = optab_handler (mov_optab, mode)->insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  gcc_assert (!data.len);

  if (endp)
    {
      rtx to1;

      gcc_assert (!data.reverse);
      if (data.autinc_to)
	{
	  if (endp == 2)
	    {
	      if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
		emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
	      else
		data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
								-1));
	    }
	  to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
					   data.offset);
	}
      else
	{
	  if (endp == 2)
	    --data.offset;
	  to1 = adjust_address (data.to, QImode, data.offset);
	}
      return to1;
    }
  else
    return data.to;
}

/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
		       unsigned int max_size)
{
  unsigned HOST_WIDE_INT n_insns = 0;
  enum machine_mode tmode;

  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode tmode, xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
	   tmode != VOIDmode;
	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
	    || SLOW_UNALIGNED_ACCESS (tmode, align))
	  break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = optab_handler (mov_optab, mode)->insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  gcc_assert (!l);
  return n_insns;
}
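/* Worked example (illustrative, 32-bit target with cheap unaligned
   access): for l == 10 the loop counts 10/4 == 2 SImode moves (l
   becomes 2), then 2/2 == 1 HImode move (l becomes 0), for 3 insns in
   total; with only byte alignment on a strict-alignment target it
   counts 10 QImode moves instead.  */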
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
		  struct move_by_pieces *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->to)
	{
	  if (data->autinc_to)
	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
					     data->offset);
	  else
	    to1 = adjust_address (data->to, mode, data->offset);
	}

      if (data->autinc_from)
	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
					   data->offset);
      else
	from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
	emit_insn ((*genfun) (to1, from1));
      else
	{
#ifdef PUSH_ROUNDING
	  emit_single_push_insn (mode, from1, NULL);
#else
	  gcc_unreachable ();
#endif
	}

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}

/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
		       unsigned int expected_align, HOST_WIDE_INT expected_size)
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
    case BLOCK_OP_TAILCALL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
	 to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      gcc_unreachable ();
    }

  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  gcc_assert (MEM_P (x));
  gcc_assert (MEM_P (y));
  gcc_assert (size);

  /* Make sure we've got BLKmode addresses; store_one_arg can decide that
     block copy is more efficient for other large modes, e.g. DCmode.  */
  x = adjust_address (x, BLKmode, 0);
  y = adjust_address (y, BLKmode, 0);

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (GET_CODE (size) == CONST_INT)
    {
      if (INTVAL (size) == 0)
	return 0;

      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, size);
      set_mem_size (y, size);
    }

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movmem (x, y, size, align,
				       expected_align, expected_size))
    ;
  else if (may_use_call)
    retval = emit_block_move_via_libcall (x, y, size,
					  method == BLOCK_OP_TAILCALL);
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}

rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  return emit_block_move_hints (x, y, size, method, 0, -1);
}
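/* Usage sketch (illustrative): a typical aggregate assignment expands as

     emit_block_move (dst_mem, src_mem, GEN_INT (size_in_bytes),
		      BLOCK_OP_NORMAL);

   and emit_block_move_hints then tries move_by_pieces, a movmem
   pattern, a memcpy libcall, and an explicit loop, in that order.  */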
/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
#if defined (REG_PARM_STACK_SPACE)
  tree fn;
#endif

  /* If arguments are pushed on the stack, then they're safe.  */
  if (PUSH_ARGS)
    return true;

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE)
  fn = emit_block_move_libcall_fn (false);
  if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
      && REG_PARM_STACK_SPACE (fn) != 0)
    return false;
#endif

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
  {
    CUMULATIVE_ARGS args_so_far;
    tree fn, arg;

    fn = emit_block_move_libcall_fn (false);
    INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);

    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
      {
	enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
	rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
	if (!tmp || !REG_P (tmp))
	  return false;
	if (targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL, 1))
	  return false;
	FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
      }
  }
  return true;
}

/* A subroutine of emit_block_move.  Expand a movmem pattern;
   return true if successful.  */

static bool
emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
			    unsigned int expected_align, HOST_WIDE_INT expected_size)
{
  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  int save_volatile_ok = volatile_ok;
  enum machine_mode mode;

  if (expected_align < align)
    expected_align = align;

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = movmem_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
	     here because if SIZE is less than the mode mask, as it is
	     returned by the macro, it will definitely be less than the
	     actual mode mask.  */
	  && ((GET_CODE (size) == CONST_INT
	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
		   <= (GET_MODE_MASK (mode) >> 1)))
	      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
	      || (*pred) (x, BLKmode))
	  && ((pred = insn_data[(int) code].operand[1].predicate) == 0
	      || (*pred) (y, BLKmode))
	  && ((pred = insn_data[(int) code].operand[3].predicate) == 0
	      || (*pred) (opalign, VOIDmode)))
	{
	  rtx op2;
	  rtx last = get_last_insn ();
	  rtx pat;

	  op2 = convert_to_mode (mode, size, 1);
	  pred = insn_data[(int) code].operand[2].predicate;
	  if (pred != 0 && ! (*pred) (op2, mode))
	    op2 = copy_to_mode_reg (mode, op2);

	  /* ??? When called via emit_block_move_for_call, it'd be
	     nice if there were some way to inform the backend, so
	     that it doesn't fail the expansion because it thinks
	     emitting the libcall would be more efficient.  */

	  if (insn_data[(int) code].n_operands == 4)
	    pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	  else
	    pat = GEN_FCN ((int) code) (x, y, op2, opalign,
					GEN_INT (expected_align
						 / BITS_PER_UNIT),
					GEN_INT (expected_size));

	  if (pat)
	    {
	      emit_insn (pat);
	      volatile_ok = save_volatile_ok;
	      return true;
	    }
	  else
	    delete_insns_since (last);
	}
    }

  volatile_ok = save_volatile_ok;
  return false;
}

/* A subroutine of emit_block_move.  Expand a call to memcpy.
   Return the return value from memcpy, 0 otherwise.  */

rtx
emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
{
  rtx dst_addr, src_addr;
  tree call_expr, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* Emit code to copy the addresses of DST and SRC and SIZE into new
     pseudos.  We can then place those new pseudos into a VAR_DECL and
     use them later.  */

  dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));

  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  size_mode = TYPE_MODE (sizetype);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  size_tree = make_tree (sizetype, size);

  fn = emit_block_move_libcall_fn (true);
  call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
  CALL_EXPR_TAILCALL (call_expr) = tailcall;

  retval = expand_normal (call_expr);

  return retval;
}

/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_move_fn;

void
init_block_move_fn (const char *asmspec)
{
  if (!block_move_fn)
    {
      tree args, fn;

      fn = get_identifier ("memcpy");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
				       const_ptr_type_node, sizetype,
				       NULL_TREE);

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;
      DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
      DECL_VISIBILITY_SPECIFIED (fn) = 1;

      block_move_fn = fn;
    }

  if (asmspec)
    set_user_assembler_name (block_move_fn, asmspec);
}

static tree
emit_block_move_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_move_fn)
    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_move_fn);
      assemble_external (block_move_fn);
    }

  return block_move_fn;
}

/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

static void
emit_block_move_via_loop (rtx x, rtx y, rtx size,
			  unsigned int align ATTRIBUTE_UNUSED)
{
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (Pmode, iter_mode, iter, true);
  x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
  y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
			     true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
			   true, top_label);
}
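/* The RTL emitted above corresponds to this C-level sketch
   (illustrative only):

     iter = 0;
     goto cmp;
   top:
     ((char *) x)[iter] = ((char *) y)[iter];
     iter++;
   cmp:
     if (iter < size)
       goto top;

   i.e. a bottom-tested byte loop whose body is skipped entirely when
   SIZE is zero.  */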
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (int regno, rtx x, int nregs)
{
  int i;

  if (nregs == 0)
    return;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      rtx last = get_last_insn ();
      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
				    GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      gcc_assert (tem);

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}

/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

rtx
gen_group_rtx (rtx orig)
{
  int i, length;
  rtx *tmps;

  gcc_assert (GET_CODE (orig) == PARALLEL);

  length = XVECLEN (orig, 0);
  tmps = XALLOCAVEC (rtx, length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  if (i == 1)
    tmps[0] = 0;

  for (; i < length; i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
    }

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}
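/* Illustrative note: a group such as

     (parallel [(expr_list (reg:DI 3) (const_int 0))
		(expr_list (reg:DI 4) (const_int 8))])

   describes a value whose bytes 0-7 live in register 3 and bytes 8-15
   in register 4 (register numbers hypothetical); gen_group_rtx clones
   that shape with fresh pseudos of the same modes.  */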
1637 /* A subroutine of emit_group_load. Arguments as for emit_group_load,
1638 except that values are placed in TMPS[i], and must later be moved
1639 into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
1642 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
1646 enum machine_mode m = GET_MODE (orig_src);
1648 gcc_assert (GET_CODE (dst) == PARALLEL);
1651 && !SCALAR_INT_MODE_P (m)
1652 && !MEM_P (orig_src)
1653 && GET_CODE (orig_src) != CONCAT)
1655 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1656 if (imode == BLKmode)
1657 src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
1659 src = gen_reg_rtx (imode);
1660 if (imode != BLKmode)
1661 src = gen_lowpart (GET_MODE (orig_src), src);
1662 emit_move_insn (src, orig_src);
1663 /* ...and back again. */
1664 if (imode != BLKmode)
1665 src = gen_lowpart (imode, src);
1666 emit_group_load_1 (tmps, dst, src, type, ssize);
1670 /* Check for a NULL entry, used to indicate that the parameter goes
1671 both on the stack and in registers. */
1672 if (XEXP (XVECEXP (dst, 0, 0), 0))
1677 /* Process the pieces. */
1678 for (i = start; i < XVECLEN (dst, 0); i++)
1680 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1681 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1682 unsigned int bytelen = GET_MODE_SIZE (mode);
1685 /* Handle trailing fragments that run over the size of the struct. */
1686 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1688 /* Arrange to shift the fragment to where it belongs.
1689 extract_bit_field loads to the lsb of the reg. */
1691 #ifdef BLOCK_REG_PADDING
1692 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1693 == (BYTES_BIG_ENDIAN ? upward : downward)
1698 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1699 bytelen = ssize - bytepos;
1700 gcc_assert (bytelen > 0);
1703 /* If we won't be loading directly from memory, protect the real source
1704 from strange tricks we might play; but make sure that the source can
1705 be loaded directly into the destination. */
1707 if (!MEM_P (orig_src)
1708 && (!CONSTANT_P (orig_src)
1709 || (GET_MODE (orig_src) != mode
1710 && GET_MODE (orig_src) != VOIDmode)))
1712 if (GET_MODE (orig_src) == VOIDmode)
1713 src = gen_reg_rtx (mode);
1715 src = gen_reg_rtx (GET_MODE (orig_src));
1717 emit_move_insn (src, orig_src);
1720 /* Optimize the access just a bit. */
1722 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1723 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1724 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1725 && bytelen == GET_MODE_SIZE (mode))
1727 tmps[i] = gen_reg_rtx (mode);
1728 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1730 else if (COMPLEX_MODE_P (mode)
1731 && GET_MODE (src) == mode
1732 && bytelen == GET_MODE_SIZE (mode))
1733 /* Let emit_move_complex do the bulk of the work. */
1735 else if (GET_CODE (src) == CONCAT)
1737 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1738 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1740 if ((bytepos == 0 && bytelen == slen0)
1741 || (bytepos != 0 && bytepos + bytelen <= slen))
1743 /* The following assumes that the concatenated objects all
1744 have the same size. In this case, a simple calculation
1745 can be used to determine the object and the bit field
1747 tmps[i] = XEXP (src, bytepos / slen0);
1748 if (! CONSTANT_P (tmps[i])
1749 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1750 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1751 (bytepos % slen0) * BITS_PER_UNIT,
1752 1, NULL_RTX, mode, mode);
1758 gcc_assert (!bytepos);
1759 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1760 emit_move_insn (mem, src);
1761 tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1762 0, 1, NULL_RTX, mode, mode);
1765 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1766 SIMD register, which is currently broken. While we get GCC
1767 to emit proper RTL for these cases, let's dump to memory. */
1768 else if (VECTOR_MODE_P (GET_MODE (dst))
1771 int slen = GET_MODE_SIZE (GET_MODE (src));
1774 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1775 emit_move_insn (mem, src);
1776 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1778 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1779 && XVECLEN (dst, 0) > 1)
1780 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE(dst), bytepos);
1781 else if (CONSTANT_P (src))
1783 HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;
1791 gcc_assert (2 * len == ssize);
1792 split_double (src, &first, &second);
1799 else if (REG_P (src) && GET_MODE (src) == mode)
1802 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1803 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1807 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1808 build_int_cst (NULL_TREE, shift), tmps[i], 0);
1812 /* Emit code to move a block SRC of type TYPE to a block DST,
1813 where DST is non-consecutive registers represented by a PARALLEL.
1814 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1818 emit_group_load (rtx dst, rtx src, tree type, int ssize)
1823 tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
1824 emit_group_load_1 (tmps, dst, src, type, ssize);
1826 /* Copy the extracted pieces into the proper (probable) hard regs. */
1827 for (i = 0; i < XVECLEN (dst, 0); i++)
1829 rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1832 emit_move_insn (d, tmps[i]);
1836 /* Similar, but load SRC into new pseudos in a format that looks like
1837 PARALLEL. This can later be fed to emit_group_move to get things
1838 in the right place. */
1841 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1846 vec = rtvec_alloc (XVECLEN (parallel, 0));
1847 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1849 /* Convert the vector to look just like the original PARALLEL, except
1850 with the computed values. */
1851 for (i = 0; i < XVECLEN (parallel, 0); i++)
1853 rtx e = XVECEXP (parallel, 0, i);
1854 rtx d = XEXP (e, 0);
1858 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1859 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1861 RTVEC_ELT (vec, i) = e;
1864 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1867 /* Emit code to move a block SRC to block DST, where SRC and DST are
1868 non-consecutive groups of registers, each represented by a PARALLEL. */
1871 emit_group_move (rtx dst, rtx src)
1875 gcc_assert (GET_CODE (src) == PARALLEL
1876 && GET_CODE (dst) == PARALLEL
1877 && XVECLEN (src, 0) == XVECLEN (dst, 0));
1879 /* Skip first entry if NULL. */
1880 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1881 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1882 XEXP (XVECEXP (src, 0, i), 0));
1885 /* Move a group of registers represented by a PARALLEL into pseudos. */
1888 emit_group_move_into_temps (rtx src)
1890 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1893 for (i = 0; i < XVECLEN (src, 0); i++)
1895 rtx e = XVECEXP (src, 0, i);
1896 rtx d = XEXP (e, 0);
1899 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1900 RTVEC_ELT (vec, i) = e;
1903 return gen_rtx_PARALLEL (GET_MODE (src), vec);
1906 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1907 where SRC is non-consecutive registers represented by a PARALLEL.
1908 SSIZE represents the total size of block ORIG_DST, or -1 if not
1912 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1915 int start, finish, i;
1916 enum machine_mode m = GET_MODE (orig_dst);
1918 gcc_assert (GET_CODE (src) == PARALLEL);
1920 if (!SCALAR_INT_MODE_P (m)
1921 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1923 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1924 if (imode == BLKmode)
1925 dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
1927 dst = gen_reg_rtx (imode);
1928 emit_group_store (dst, src, type, ssize);
1929 if (imode != BLKmode)
1930 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1931 emit_move_insn (orig_dst, dst);
1935 /* Check for a NULL entry, used to indicate that the parameter goes
1936 both on the stack and in registers. */
1937 if (XEXP (XVECEXP (src, 0, 0), 0))
1941 finish = XVECLEN (src, 0);
1943 tmps = XALLOCAVEC (rtx, finish);
1945 /* Copy the (probable) hard regs into pseudos. */
1946 for (i = start; i < finish; i++)
1948 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1949 if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1951 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1952 emit_move_insn (tmps[i], reg);
1958 /* If we won't be storing directly into memory, protect the real destination
1959 from strange tricks we might play. */
1961 if (GET_CODE (dst) == PARALLEL)
1965 /* We can get a PARALLEL dst if there is a conditional expression in
1966 a return statement. In that case, the dst and src are the same,
1967 so no action is necessary. */
1968 if (rtx_equal_p (dst, src))
1971 /* It is unclear if we can ever reach here, but we may as well handle
1972 it. Allocate a temporary, and split this into a store/load to/from
1975 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
1976 emit_group_store (temp, src, type, ssize);
1977 emit_group_load (dst, temp, type, ssize);
1980 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1982 enum machine_mode outer = GET_MODE (dst);
1983 enum machine_mode inner;
1984 HOST_WIDE_INT bytepos;
1988 if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
1989 dst = gen_reg_rtx (outer);
1991 /* Make life a bit easier for combine. */
1992 /* If the first element of the vector is the low part
1993 of the destination mode, use a paradoxical subreg to
1994 initialize the destination. */
1997 inner = GET_MODE (tmps[start]);
1998 bytepos = subreg_lowpart_offset (inner, outer);
1999 if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
2001 temp = simplify_gen_subreg (outer, tmps[start],
2005 emit_move_insn (dst, temp);
2012 /* If the first element wasn't the low part, try the last. */
2014 && start < finish - 1)
2016 inner = GET_MODE (tmps[finish - 1]);
2017 bytepos = subreg_lowpart_offset (inner, outer);
2018 if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
2020 temp = simplify_gen_subreg (outer, tmps[finish - 1],
2024 emit_move_insn (dst, temp);
2031 /* Otherwise, simply initialize the result to zero. */
2033 emit_move_insn (dst, CONST0_RTX (outer));
2036 /* Process the pieces. */
2037 for (i = start; i < finish; i++)
2039 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2040 enum machine_mode mode = GET_MODE (tmps[i]);
2041 unsigned int bytelen = GET_MODE_SIZE (mode);
2044 /* Handle trailing fragments that run over the size of the struct. */
2045 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2047 /* store_bit_field always takes its value from the lsb.
2048 Move the fragment to the lsb if it's not already there. */
2050 #ifdef BLOCK_REG_PADDING
2051 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2052 == (BYTES_BIG_ENDIAN ? upward : downward)
2058 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2059 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2060 build_int_cst (NULL_TREE, shift),
2063 bytelen = ssize - bytepos;
2066 if (GET_CODE (dst) == CONCAT)
2068 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2069 dest = XEXP (dst, 0);
2070 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2072 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2073 dest = XEXP (dst, 1);
2077 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
2078 dest = assign_stack_temp (GET_MODE (dest),
2079 GET_MODE_SIZE (GET_MODE (dest)), 0);
2080 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2087 /* Optimize the access just a bit. */
2089 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2090 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2091 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2092 && bytelen == GET_MODE_SIZE (mode))
2093 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2095 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2099 /* Copy from the pseudo into the (probable) hard reg. */
2100 if (orig_dst != dst)
2101 emit_move_insn (orig_dst, dst);
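#if 0
/* Editor's sketch, not part of the original file (guarded out of the
   build): one plausible way a caller could use emit_group_store to
   scatter a two-register value into a 16-byte stack temporary.  The
   PARALLEL pairs each register with its byte offset in the whole
   value, which is the shape the loop above consumes; the DImode
   registers and offsets are illustrative assumptions.  */
static void
example_emit_group_store (tree type)
{
  rtx tmp = assign_stack_temp (BLKmode, 16, 0);
  rtx src = gen_rtx_PARALLEL
    (VOIDmode,
     gen_rtvec (2,
		gen_rtx_EXPR_LIST (VOIDmode, gen_rtx_REG (DImode, 0),
				   GEN_INT (0)),
		gen_rtx_EXPR_LIST (VOIDmode, gen_rtx_REG (DImode, 1),
				   GEN_INT (8))));
  emit_group_store (tmp, src, type, 16);
}
#endif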
2104 /* Generate code to copy a BLKmode object of TYPE out of a
2105 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2106 is null, a stack temporary is created. TGTBLK is returned.
2108 The purpose of this routine is to handle functions that return
2109 BLKmode structures in registers. Some machines (the PA for example)
2110 want to return all small structures in registers regardless of the
2111 structure's alignment. */
2114 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2116 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2117 rtx src = NULL, dst = NULL;
2118 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2119 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2120 enum machine_mode copy_mode;
2124 tgtblk = assign_temp (build_qualified_type (type,
2126 | TYPE_QUAL_CONST)),
2128 preserve_temp_slots (tgtblk);
2131 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2132 into a new pseudo which is a full word. */
2134 if (GET_MODE (srcreg) != BLKmode
2135 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2136 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2138 /* If the structure doesn't take up a whole number of words, see whether
2139 SRCREG is padded on the left or on the right. If it's on the left,
2140 set PADDING_CORRECTION to the number of bits to skip.
2142 In most ABIs, the structure will be returned at the least significant end of
2143 the register, which translates to right padding on little-endian
2144 targets and left padding on big-endian targets. The opposite
2145 holds if the structure is returned at the most significant
2146 end of the register. */
2147 if (bytes % UNITS_PER_WORD != 0
2148 && (targetm.calls.return_in_msb (type)
2150 : BYTES_BIG_ENDIAN))
2152 padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2154 /* Copy the structure BITSIZE bits at a time. If the target lives in
2155 memory, take care of not reading/writing past its end by selecting
2156 a copy mode suited to BITSIZE. This should always be possible, since BITSIZE is a power of two no wider than a word.
2159 We could probably emit more efficient code for machines which do not use
2160 strict alignment, but it doesn't seem worth the effort at the current time. */
2163 copy_mode = word_mode;
2166 enum machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
2167 if (mem_mode != BLKmode)
2168 copy_mode = mem_mode;
2171 for (bitpos = 0, xbitpos = padding_correction;
2172 bitpos < bytes * BITS_PER_UNIT;
2173 bitpos += bitsize, xbitpos += bitsize)
2175 /* We need a new source operand each time xbitpos is on a
2176 word boundary and when xbitpos == padding_correction
2177 (the first time through). */
2178 if (xbitpos % BITS_PER_WORD == 0
2179 || xbitpos == padding_correction)
2180 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2183 /* We need a new destination operand each time bitpos is on a word boundary. */
2185 if (bitpos % BITS_PER_WORD == 0)
2186 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2188 /* Use xbitpos for the source extraction (right justified) and
2189 bitpos for the destination store (left justified). */
2190 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, copy_mode,
2191 extract_bit_field (src, bitsize,
2192 xbitpos % BITS_PER_WORD, 1,
2193 NULL_RTX, copy_mode, copy_mode));
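#if 0
/* Editor's worked example (illustrative assumptions, guarded out):
   a 6-byte struct copied out of 4-byte words on a big-endian target
   with return_in_msb false.  bytes % UNITS_PER_WORD == 2, so the
   value is left-padded and
     padding_correction = BITS_PER_WORD - 2 * BITS_PER_UNIT = 16,
   i.e. the copy loop above skips the 16 pad bits at the most
   significant end of SRCREG before extracting data bits.  */
#endif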
2199 /* Add a USE expression for REG to the (possibly empty) list pointed
2200 to by CALL_FUSAGE. REG must denote a hard register. */
2203 use_reg (rtx *call_fusage, rtx reg)
2205 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2208 = gen_rtx_EXPR_LIST (VOIDmode,
2209 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2212 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2213 starting at REGNO. All of these registers must be hard registers. */
2216 use_regs (rtx *call_fusage, int regno, int nregs)
2220 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2222 for (i = 0; i < nregs; i++)
2223 use_reg (call_fusage, regno_reg_rtx[regno + i]);
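#if 0
/* Editor's usage sketch, not in the original file (guarded out):
   record that a call reads hard registers 0 and 1.  The resulting
   list is what callers attach to a call insn as
   CALL_INSN_FUNCTION_USAGE; the register numbers are illustrative.  */
static void
example_use_regs (void)
{
  rtx call_fusage = NULL_RTX;
  use_regs (&call_fusage, 0, 2);
}
#endif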
2226 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2227 PARALLEL REGS. This is for calls that pass values in multiple
2228 non-contiguous locations. The Irix 6 ABI has examples of this. */
2231 use_group_regs (rtx *call_fusage, rtx regs)
2235 for (i = 0; i < XVECLEN (regs, 0); i++)
2237 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2239 /* A NULL entry means the parameter goes both on the stack and in
2240 registers. This can also be a MEM for targets that pass values
2241 partially on the stack and partially in registers. */
2242 if (reg != 0 && REG_P (reg))
2243 use_reg (call_fusage, reg);
2248 /* Determine whether the LEN bytes generated by CONSTFUN can be
2249 stored to memory using several move instructions. CONSTFUNDATA is
2250 a pointer which will be passed as argument in every CONSTFUN call.
2251 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2252 a memset operation and false if it's a copy of a constant string.
2253 Return nonzero if a call to store_by_pieces should succeed. */
2256 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2257 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2258 void *constfundata, unsigned int align, bool memsetp)
2260 unsigned HOST_WIDE_INT l;
2261 unsigned int max_size;
2262 HOST_WIDE_INT offset = 0;
2263 enum machine_mode mode, tmode;
2264 enum insn_code icode;
2272 ? SET_BY_PIECES_P (len, align)
2273 : STORE_BY_PIECES_P (len, align)))
2276 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2277 if (align >= GET_MODE_ALIGNMENT (tmode))
2278 align = GET_MODE_ALIGNMENT (tmode);
2281 enum machine_mode xmode;
2283 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2285 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2286 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2287 || SLOW_UNALIGNED_ACCESS (tmode, align))
2290 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2293 /* We would first store what we can in the largest integer mode, then go to
2294 successively smaller modes. */
2297 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2302 max_size = STORE_MAX_PIECES + 1;
2303 while (max_size > 1)
2305 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2306 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2307 if (GET_MODE_SIZE (tmode) < max_size)
2310 if (mode == VOIDmode)
2313 icode = optab_handler (mov_optab, mode)->insn_code;
2314 if (icode != CODE_FOR_nothing
2315 && align >= GET_MODE_ALIGNMENT (mode))
2317 unsigned int size = GET_MODE_SIZE (mode);
2324 cst = (*constfun) (constfundata, offset, mode);
2325 if (!LEGITIMATE_CONSTANT_P (cst))
2335 max_size = GET_MODE_SIZE (mode);
2338 /* The code above should have handled everything. */
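#if 0
/* Editor's sketch, not in the original file (guarded out): a minimal
   CONSTFUN that yields a zero for every piece, and a query asking
   whether 32 such bytes could be stored by pieces at 32-bit
   alignment as a memset-style operation.  The names are
   illustrative.  */
static rtx
example_zero_piece (void *data ATTRIBUTE_UNUSED,
		    HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
		    enum machine_mode mode)
{
  return CONST0_RTX (mode);
}

static int
example_can_store (void)
{
  return can_store_by_pieces (32, example_zero_piece, NULL, 32, true);
}
#endif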
2345 /* Generate several move instructions to store LEN bytes generated by
2346 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2347 pointer which will be passed as argument in every CONSTFUN call.
2348 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2349 a memset operation and false if it's a copy of a constant string.
2350 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
2351 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala stpcpy. */
2355 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2356 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2357 void *constfundata, unsigned int align, bool memsetp, int endp)
2359 struct store_by_pieces data;
2363 gcc_assert (endp != 2);
2368 ? SET_BY_PIECES_P (len, align)
2369 : STORE_BY_PIECES_P (len, align));
2370 data.constfun = constfun;
2371 data.constfundata = constfundata;
2374 store_by_pieces_1 (&data, align);
2379 gcc_assert (!data.reverse);
2384 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2385 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2387 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2390 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2397 to1 = adjust_address (data.to, QImode, data.offset);
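#if 0
/* Editor's usage sketch, not in the original file (guarded out):
   store 16 constant bytes into TO and get back a MEM addressing the
   byte just past the store, as mempcpy would return (ENDP == 1).
   example_zero_piece is the illustrative callback sketched after
   can_store_by_pieces above.  */
static rtx
example_store_by_pieces (rtx to)
{
  return store_by_pieces (to, 16, example_zero_piece, NULL, 32, true, 1);
}
#endif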
2405 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2406 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2409 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2411 struct store_by_pieces data;
2416 data.constfun = clear_by_pieces_1;
2417 data.constfundata = NULL;
2420 store_by_pieces_1 (&data, align);
2423 /* Callback routine for clear_by_pieces.
2424 Return const0_rtx unconditionally. */
2427 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2428 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2429 enum machine_mode mode ATTRIBUTE_UNUSED)
2434 /* Subroutine of clear_by_pieces and store_by_pieces.
2435 Generate several move instructions to store LEN bytes of block TO. (A MEM
2436 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2439 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2440 unsigned int align ATTRIBUTE_UNUSED)
2442 rtx to_addr = XEXP (data->to, 0);
2443 unsigned int max_size = STORE_MAX_PIECES + 1;
2444 enum machine_mode mode = VOIDmode, tmode;
2445 enum insn_code icode;
2448 data->to_addr = to_addr;
2450 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2451 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2453 data->explicit_inc_to = 0;
2455 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2457 data->offset = data->len;
2459 /* If storing requires more than two move insns,
2460 copy addresses to registers (to make displacements shorter)
2461 and use post-increment if available. */
2462 if (!data->autinc_to
2463 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2465 /* Determine the main mode we'll be using. */
2466 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2467 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2468 if (GET_MODE_SIZE (tmode) < max_size)
2471 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2473 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2474 data->autinc_to = 1;
2475 data->explicit_inc_to = -1;
2478 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2479 && ! data->autinc_to)
2481 data->to_addr = copy_addr_to_reg (to_addr);
2482 data->autinc_to = 1;
2483 data->explicit_inc_to = 1;
2486 if ( !data->autinc_to && CONSTANT_P (to_addr))
2487 data->to_addr = copy_addr_to_reg (to_addr);
2490 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2491 if (align >= GET_MODE_ALIGNMENT (tmode))
2492 align = GET_MODE_ALIGNMENT (tmode);
2495 enum machine_mode xmode;
2497 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2499 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2500 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2501 || SLOW_UNALIGNED_ACCESS (tmode, align))
2504 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2507 /* First store what we can in the largest integer mode, then go to
2508 successively smaller modes. */
2510 while (max_size > 1)
2512 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2513 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2514 if (GET_MODE_SIZE (tmode) < max_size)
2517 if (mode == VOIDmode)
2520 icode = optab_handler (mov_optab, mode)->insn_code;
2521 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2522 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2524 max_size = GET_MODE_SIZE (mode);
2527 /* The code above should have handled everything. */
2528 gcc_assert (!data->len);
2531 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2532 with move instructions for mode MODE. GENFUN is the gen_... function
2533 to make a move insn for that mode. DATA has all the other info. */
2536 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2537 struct store_by_pieces *data)
2539 unsigned int size = GET_MODE_SIZE (mode);
2542 while (data->len >= size)
2545 data->offset -= size;
2547 if (data->autinc_to)
2548 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2551 to1 = adjust_address (data->to, mode, data->offset);
2553 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2554 emit_insn (gen_add2_insn (data->to_addr,
2555 GEN_INT (-(HOST_WIDE_INT) size)));
2557 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2558 emit_insn ((*genfun) (to1, cst));
2560 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2561 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2563 if (! data->reverse)
2564 data->offset += size;
2570 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2571 its length in bytes. */
2574 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2575 unsigned int expected_align, HOST_WIDE_INT expected_size)
2577 enum machine_mode mode = GET_MODE (object);
2580 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2582 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2583 just move a zero. Otherwise, do this a piece at a time. */
2585 && GET_CODE (size) == CONST_INT
2586 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2588 rtx zero = CONST0_RTX (mode);
2591 emit_move_insn (object, zero);
2595 if (COMPLEX_MODE_P (mode))
2597 zero = CONST0_RTX (GET_MODE_INNER (mode));
2600 write_complex_part (object, zero, 0);
2601 write_complex_part (object, zero, 1);
2607 if (size == const0_rtx)
2610 align = MEM_ALIGN (object);
2612 if (GET_CODE (size) == CONST_INT
2613 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2614 clear_by_pieces (object, INTVAL (size), align);
2615 else if (set_storage_via_setmem (object, size, const0_rtx, align,
2616 expected_align, expected_size))
2619 return set_storage_via_libcall (object, size, const0_rtx,
2620 method == BLOCK_OP_TAILCALL);
2626 clear_storage (rtx object, rtx size, enum block_op_methods method)
2628 return clear_storage_hints (object, size, method, 0, -1);
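#if 0
/* Editor's usage sketch, not in the original file (guarded out):
   zero a 32-byte BLKmode object; clear_storage picks a plain zero
   move, clear_by_pieces, a setmem pattern, or a memset libcall as
   the logic above decides.  */
static void
example_clear_storage (rtx mem)
{
  clear_storage (mem, GEN_INT (32), BLOCK_OP_NORMAL);
}
#endif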
2632 /* A subroutine of clear_storage. Expand a call to memset.
2633 Return the return value of memset, 0 otherwise. */
2636 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2638 tree call_expr, fn, object_tree, size_tree, val_tree;
2639 enum machine_mode size_mode;
2642 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2643 place those pseudos into a VAR_DECL and use them later. */
2645 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2647 size_mode = TYPE_MODE (sizetype);
2648 size = convert_to_mode (size_mode, size, 1);
2649 size = copy_to_mode_reg (size_mode, size);
2651 /* It is incorrect to use the libcall calling conventions to call
2652 memset in this context. This could be a user call to memset and
2653 the user may wish to examine the return value from memset. For
2654 targets where libcalls and normal calls have different conventions
2655 for returning pointers, we could end up generating incorrect code. */
2657 object_tree = make_tree (ptr_type_node, object);
2658 if (GET_CODE (val) != CONST_INT)
2659 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2660 size_tree = make_tree (sizetype, size);
2661 val_tree = make_tree (integer_type_node, val);
2663 fn = clear_storage_libcall_fn (true);
2664 call_expr = build_call_expr (fn, 3,
2665 object_tree, val_tree, size_tree);
2666 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2668 retval = expand_normal (call_expr);
2673 /* A subroutine of set_storage_via_libcall. Create the tree node
2674 for the function we use for block clears. The first time FOR_CALL
2675 is true, we call assemble_external. */
2677 static GTY(()) tree block_clear_fn;
2680 init_block_clear_fn (const char *asmspec)
2682 if (!block_clear_fn)
2686 fn = get_identifier ("memset");
2687 args = build_function_type_list (ptr_type_node, ptr_type_node,
2688 integer_type_node, sizetype,
2691 fn = build_decl (FUNCTION_DECL, fn, args);
2692 DECL_EXTERNAL (fn) = 1;
2693 TREE_PUBLIC (fn) = 1;
2694 DECL_ARTIFICIAL (fn) = 1;
2695 TREE_NOTHROW (fn) = 1;
2696 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2697 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2699 block_clear_fn = fn;
2703 set_user_assembler_name (block_clear_fn, asmspec);
2707 clear_storage_libcall_fn (int for_call)
2709 static bool emitted_extern;
2711 if (!block_clear_fn)
2712 init_block_clear_fn (NULL);
2714 if (for_call && !emitted_extern)
2716 emitted_extern = true;
2717 make_decl_rtl (block_clear_fn);
2718 assemble_external (block_clear_fn);
2721 return block_clear_fn;
2724 /* Expand a setmem pattern; return true if successful. */
2727 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2728 unsigned int expected_align, HOST_WIDE_INT expected_size)
2730 /* Try the most limited insn first, because there's no point
2731 including more than one in the machine description unless
2732 the more limited one has some advantage. */
2734 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2735 enum machine_mode mode;
2737 if (expected_align < align)
2738 expected_align = align;
2740 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2741 mode = GET_MODE_WIDER_MODE (mode))
2743 enum insn_code code = setmem_optab[(int) mode];
2744 insn_operand_predicate_fn pred;
2746 if (code != CODE_FOR_nothing
2747 /* We don't need MODE to be narrower than
2748 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2749 the mode mask, as it is returned by the macro, it will
2750 definitely be less than the actual mode mask. */
2751 && ((GET_CODE (size) == CONST_INT
2752 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2753 <= (GET_MODE_MASK (mode) >> 1)))
2754 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2755 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2756 || (*pred) (object, BLKmode))
2757 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
2758 || (*pred) (opalign, VOIDmode)))
2761 enum machine_mode char_mode;
2762 rtx last = get_last_insn ();
2765 opsize = convert_to_mode (mode, size, 1);
2766 pred = insn_data[(int) code].operand[1].predicate;
2767 if (pred != 0 && ! (*pred) (opsize, mode))
2768 opsize = copy_to_mode_reg (mode, opsize);
2771 char_mode = insn_data[(int) code].operand[2].mode;
2772 if (char_mode != VOIDmode)
2774 opchar = convert_to_mode (char_mode, opchar, 1);
2775 pred = insn_data[(int) code].operand[2].predicate;
2776 if (pred != 0 && ! (*pred) (opchar, char_mode))
2777 opchar = copy_to_mode_reg (char_mode, opchar);
2780 if (insn_data[(int) code].n_operands == 4)
2781 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign);
2783 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign,
2784 GEN_INT (expected_align
2786 GEN_INT (expected_size));
2793 delete_insns_since (last);
2801 /* Write to one of the components of the complex value CPLX. Write VAL to
2802 the real part if IMAG_P is false, and the imaginary part if it's true. */
2805 write_complex_part (rtx cplx, rtx val, bool imag_p)
2807 enum machine_mode cmode;
2808 enum machine_mode imode;
2811 if (GET_CODE (cplx) == CONCAT)
2813 emit_move_insn (XEXP (cplx, imag_p), val);
2817 cmode = GET_MODE (cplx);
2818 imode = GET_MODE_INNER (cmode);
2819 ibitsize = GET_MODE_BITSIZE (imode);
2821 /* For MEMs simplify_gen_subreg may generate an invalid new address
2822 because, e.g., the original address is considered mode-dependent
2823 by the target, which restricts simplify_subreg from invoking
2824 adjust_address_nv. Instead of preparing fallback support for an
2825 invalid address, we call adjust_address_nv directly. */
2828 emit_move_insn (adjust_address_nv (cplx, imode,
2829 imag_p ? GET_MODE_SIZE (imode) : 0),
2834 /* If the sub-object is at least word sized, then we know that subregging
2835 will work. This special case is important, since store_bit_field
2836 wants to operate on integer modes, and there's rarely an OImode to
2837 correspond to TCmode. */
2838 if (ibitsize >= BITS_PER_WORD
2839 /* For hard regs we have exact predicates. Assume we can split
2840 the original object if it spans an even number of hard regs.
2841 This special case is important for SCmode on 64-bit platforms
2842 where the natural size of floating-point regs is 32-bit. */
2844 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2845 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2847 rtx part = simplify_gen_subreg (imode, cplx, cmode,
2848 imag_p ? GET_MODE_SIZE (imode) : 0);
2851 emit_move_insn (part, val);
2855 /* simplify_gen_subreg may fail for sub-word MEMs. */
2856 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2859 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val);
2862 /* Extract one of the components of the complex value CPLX. Extract the
2863 real part if IMAG_P is false, and the imaginary part if it's true. */
2866 read_complex_part (rtx cplx, bool imag_p)
2868 enum machine_mode cmode, imode;
2871 if (GET_CODE (cplx) == CONCAT)
2872 return XEXP (cplx, imag_p);
2874 cmode = GET_MODE (cplx);
2875 imode = GET_MODE_INNER (cmode);
2876 ibitsize = GET_MODE_BITSIZE (imode);
2878 /* Special case reads from complex constants that got spilled to memory. */
2879 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
2881 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2882 if (decl && TREE_CODE (decl) == COMPLEX_CST)
2884 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
2885 if (CONSTANT_CLASS_P (part))
2886 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
2890 /* For MEMs simplify_gen_subreg may generate an invalid new address
2891 because, e.g., the original address is considered mode-dependent
2892 by the target, which restricts simplify_subreg from invoking
2893 adjust_address_nv. Instead of preparing fallback support for an
2894 invalid address, we call adjust_address_nv directly. */
2896 return adjust_address_nv (cplx, imode,
2897 imag_p ? GET_MODE_SIZE (imode) : 0);
2899 /* If the sub-object is at least word sized, then we know that subregging
2900 will work. This special case is important, since extract_bit_field
2901 wants to operate on integer modes, and there's rarely an OImode to
2902 correspond to TCmode. */
2903 if (ibitsize >= BITS_PER_WORD
2904 /* For hard regs we have exact predicates. Assume we can split
2905 the original object if it spans an even number of hard regs.
2906 This special case is important for SCmode on 64-bit platforms
2907 where the natural size of floating-point regs is 32-bit. */
2909 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2910 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2912 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
2913 imag_p ? GET_MODE_SIZE (imode) : 0);
2917 /* simplify_gen_subreg may fail for sub-word MEMs. */
2918 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2921 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
2922 true, NULL_RTX, imode, imode);
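#if 0
/* Editor's sketch, not in the original file (guarded out): build a
   complex pseudo from two scalar parts and read one back, using the
   two helpers above.  SCmode is an illustrative choice.  */
static rtx
example_complex_parts (rtx re, rtx im)
{
  rtx c = gen_reg_rtx (SCmode);
  write_complex_part (c, re, false);
  write_complex_part (c, im, true);
  return read_complex_part (c, true);
}
#endif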
2925 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
2926 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
2927 represented in NEW_MODE. If FORCE is true, this will never happen, as
2928 we'll force-create a SUBREG if needed. */
2931 emit_move_change_mode (enum machine_mode new_mode,
2932 enum machine_mode old_mode, rtx x, bool force)
2936 if (push_operand (x, GET_MODE (x)))
2938 ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
2939 MEM_COPY_ATTRIBUTES (ret, x);
2943 /* We don't have to worry about changing the address since the
2944 size in bytes is supposed to be the same. */
2945 if (reload_in_progress)
2947 /* Copy the MEM to change the mode and move any
2948 substitutions from the old MEM to the new one. */
2949 ret = adjust_address_nv (x, new_mode, 0);
2950 copy_replacements (x, ret);
2953 ret = adjust_address (x, new_mode, 0);
2957 /* Note that we do want simplify_subreg's behavior of validating
2958 that the new mode is ok for a hard register. If we were to use
2959 simplify_gen_subreg, we would create the subreg, but would
2960 probably run into the target not being able to implement it. */
2961 /* Except, of course, when FORCE is true, when this is exactly what
2962 we want. Which is needed for CCmodes on some targets. */
2964 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
2966 ret = simplify_subreg (new_mode, x, old_mode, 0);
2972 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
2973 an integer mode of the same size as MODE. Returns the instruction
2974 emitted, or NULL if such a move could not be generated. */
2977 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
2979 enum machine_mode imode;
2980 enum insn_code code;
2982 /* There must exist a mode of the exact size we require. */
2983 imode = int_mode_for_mode (mode);
2984 if (imode == BLKmode)
2987 /* The target must support moves in this mode. */
2988 code = optab_handler (mov_optab, imode)->insn_code;
2989 if (code == CODE_FOR_nothing)
2992 x = emit_move_change_mode (imode, mode, x, force);
2995 y = emit_move_change_mode (imode, mode, y, force);
2998 return emit_insn (GEN_FCN (code) (x, y));
3001 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
3002 Return an equivalent MEM that does not use an auto-increment. */
3005 emit_move_resolve_push (enum machine_mode mode, rtx x)
3007 enum rtx_code code = GET_CODE (XEXP (x, 0));
3008 HOST_WIDE_INT adjust;
3011 adjust = GET_MODE_SIZE (mode);
3012 #ifdef PUSH_ROUNDING
3013 adjust = PUSH_ROUNDING (adjust);
3015 if (code == PRE_DEC || code == POST_DEC)
3017 else if (code == PRE_MODIFY || code == POST_MODIFY)
3019 rtx expr = XEXP (XEXP (x, 0), 1);
3022 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
3023 gcc_assert (GET_CODE (XEXP (expr, 1)) == CONST_INT);
3024 val = INTVAL (XEXP (expr, 1));
3025 if (GET_CODE (expr) == MINUS)
3027 gcc_assert (adjust == val || adjust == -val);
3031 /* Do not use anti_adjust_stack, since we don't want to update
3032 stack_pointer_delta. */
3033 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
3034 GEN_INT (adjust), stack_pointer_rtx,
3035 0, OPTAB_LIB_WIDEN);
3036 if (temp != stack_pointer_rtx)
3037 emit_move_insn (stack_pointer_rtx, temp);
3044 temp = stack_pointer_rtx;
3049 temp = plus_constant (stack_pointer_rtx, -adjust);
3055 return replace_equiv_address (x, temp);
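#if 0
/* Editor's sketch, not in the original file (guarded out): what the
   function above does for a 4-byte pre-decrement push on a
   downward-growing stack; the auto-increment address is traded for
   an explicit stack-pointer adjustment plus a plain MEM.  */
static rtx
example_resolve_push (void)
{
  rtx push = gen_rtx_MEM (SImode,
			  gen_rtx_PRE_DEC (Pmode, stack_pointer_rtx));
  return emit_move_resolve_push (SImode, push);
}
#endif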
3058 /* A subroutine of emit_move_complex. Generate a move from Y into X.
3059 X is known to satisfy push_operand, and MODE is known to be complex.
3060 Returns the last instruction emitted. */
3063 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
3065 enum machine_mode submode = GET_MODE_INNER (mode);
3068 #ifdef PUSH_ROUNDING
3069 unsigned int submodesize = GET_MODE_SIZE (submode);
3071 /* In case we output to the stack, but the size is smaller than the
3072 machine can push exactly, we need to use move instructions. */
3073 if (PUSH_ROUNDING (submodesize) != submodesize)
3075 x = emit_move_resolve_push (mode, x);
3076 return emit_move_insn (x, y);
3080 /* Note that the real part always precedes the imag part in memory
3081 regardless of machine's endianness. */
3082 switch (GET_CODE (XEXP (x, 0)))
3096 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3097 read_complex_part (y, imag_first));
3098 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3099 read_complex_part (y, !imag_first));
3102 /* A subroutine of emit_move_complex. Perform the move from Y to X
3103 via two moves of the parts. Returns the last instruction emitted. */
3106 emit_move_complex_parts (rtx x, rtx y)
3108 /* Show the output dies here. This is necessary for SUBREGs
3109 of pseudos since we cannot track their lifetimes correctly;
3110 hard regs shouldn't appear here except as return values. */
3111 if (!reload_completed && !reload_in_progress
3112 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3115 write_complex_part (x, read_complex_part (y, false), false);
3116 write_complex_part (x, read_complex_part (y, true), true);
3118 return get_last_insn ();
3121 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3122 MODE is known to be complex. Returns the last instruction emitted. */
3125 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
3129 /* Need to take special care for pushes, to maintain proper ordering
3130 of the data, and possibly extra padding. */
3131 if (push_operand (x, mode))
3132 return emit_move_complex_push (mode, x, y);
3134 /* See if we can coerce the target into moving both values at once. */
3136 /* Move floating point as parts. */
3137 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3138 && optab_handler (mov_optab, GET_MODE_INNER (mode))->insn_code != CODE_FOR_nothing)
3140 /* Not possible if the values are inherently not adjacent. */
3141 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3143 /* Is possible if both are registers (or subregs of registers). */
3144 else if (register_operand (x, mode) && register_operand (y, mode))
3146 /* If one of the operands is a memory, and alignment constraints
3147 are friendly enough, we may be able to do combined memory operations.
3148 We do not attempt this if Y is a constant because that combination is
3149 usually better with the by-parts thing below. */
3150 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3151 && (!STRICT_ALIGNMENT
3152 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3161 /* For memory to memory moves, optimal behavior can be had with the
3162 existing block move logic. */
3163 if (MEM_P (x) && MEM_P (y))
3165 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3166 BLOCK_OP_NO_LIBCALL);
3167 return get_last_insn ();
3170 ret = emit_move_via_integer (mode, x, y, true);
3175 return emit_move_complex_parts (x, y);
3178 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3179 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3182 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3186 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3189 enum insn_code code = optab_handler (mov_optab, CCmode)->insn_code;
3190 if (code != CODE_FOR_nothing)
3192 x = emit_move_change_mode (CCmode, mode, x, true);
3193 y = emit_move_change_mode (CCmode, mode, y, true);
3194 return emit_insn (GEN_FCN (code) (x, y));
3198 /* Otherwise, find the MODE_INT mode of the same width. */
3199 ret = emit_move_via_integer (mode, x, y, false);
3200 gcc_assert (ret != NULL);
3204 /* Return true if word I of OP lies entirely in the
3205 undefined bits of a paradoxical subreg. */
3208 undefined_operand_subword_p (const_rtx op, int i)
3210 enum machine_mode innermode, innermostmode;
3212 if (GET_CODE (op) != SUBREG)
3214 innermode = GET_MODE (op);
3215 innermostmode = GET_MODE (SUBREG_REG (op));
3216 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3217 /* The SUBREG_BYTE represents offset, as if the value were stored in
3218 memory, except for a paradoxical subreg where we define
3219 SUBREG_BYTE to be 0; undo this exception as in simplify_subreg. */
3221 if (SUBREG_BYTE (op) == 0
3222 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3224 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3225 if (WORDS_BIG_ENDIAN)
3226 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3227 if (BYTES_BIG_ENDIAN)
3228 offset += difference % UNITS_PER_WORD;
3230 if (offset >= GET_MODE_SIZE (innermostmode)
3231 || offset <= -GET_MODE_SIZE (word_mode))
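#if 0
/* Editor's worked example (illustrative, guarded out): on a 32-bit
   target, (subreg:DI (reg:SI x) 0) is paradoxical; word 1 of the
   DImode value lies entirely outside the SImode source, so the
   predicate above returns true for I == 1 and emit_move_multi_word
   emits no move for that word.  */
#endif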
3236 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3237 MODE is any multi-word or full-word mode that lacks a move_insn
3238 pattern. Note that you will get better code if you define such
3239 patterns, even if they must turn into multiple assembler instructions. */
3242 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3249 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3251 /* If X is a push on the stack, do the push now and replace
3252 X with a reference to the stack pointer. */
3253 if (push_operand (x, mode))
3254 x = emit_move_resolve_push (mode, x);
3256 /* If we are in reload, see if either operand is a MEM whose address
3257 is scheduled for replacement. */
3258 if (reload_in_progress && MEM_P (x)
3259 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3260 x = replace_equiv_address_nv (x, inner);
3261 if (reload_in_progress && MEM_P (y)
3262 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3263 y = replace_equiv_address_nv (y, inner);
3267 need_clobber = false;
3269 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3272 rtx xpart = operand_subword (x, i, 1, mode);
3275 /* Do not generate code for a move if it would come entirely
3276 from the undefined bits of a paradoxical subreg. */
3277 if (undefined_operand_subword_p (y, i))
3280 ypart = operand_subword (y, i, 1, mode);
3282 /* If we can't get a part of Y, put Y into memory if it is a
3283 constant. Otherwise, force it into a register. Then we must
3284 be able to get a part of Y. */
3285 if (ypart == 0 && CONSTANT_P (y))
3287 y = use_anchored_address (force_const_mem (mode, y));
3288 ypart = operand_subword (y, i, 1, mode);
3290 else if (ypart == 0)
3291 ypart = operand_subword_force (y, i, mode);
3293 gcc_assert (xpart && ypart);
3295 need_clobber |= (GET_CODE (xpart) == SUBREG);
3297 last_insn = emit_move_insn (xpart, ypart);
3303 /* Show the output dies here. This is necessary for SUBREGs
3304 of pseudos since we cannot track their lifetimes correctly;
3305 hard regs shouldn't appear here except as return values.
3306 We never want to emit such a clobber after reload. */
3308 && ! (reload_in_progress || reload_completed)
3309 && need_clobber != 0)
3317 /* Low level part of emit_move_insn.
3318 Called just like emit_move_insn, but assumes X and Y
3319 are basically valid. */
3322 emit_move_insn_1 (rtx x, rtx y)
3324 enum machine_mode mode = GET_MODE (x);
3325 enum insn_code code;
3327 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3329 code = optab_handler (mov_optab, mode)->insn_code;
3330 if (code != CODE_FOR_nothing)
3331 return emit_insn (GEN_FCN (code) (x, y));
3333 /* Expand complex moves by moving real part and imag part. */
3334 if (COMPLEX_MODE_P (mode))
3335 return emit_move_complex (mode, x, y);
3337 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
3338 || ALL_FIXED_POINT_MODE_P (mode))
3340 rtx result = emit_move_via_integer (mode, x, y, true);
3342 /* If we can't find an integer mode, use multi words. */
3346 return emit_move_multi_word (mode, x, y);
3349 if (GET_MODE_CLASS (mode) == MODE_CC)
3350 return emit_move_ccmode (mode, x, y);
3352 /* Try using a move pattern for the corresponding integer mode. This is
3353 only safe when simplify_subreg can convert MODE constants into integer
3354 constants. At present, it can only do this reliably if the value
3355 fits within a HOST_WIDE_INT. */
3356 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3358 rtx ret = emit_move_via_integer (mode, x, y, false);
3363 return emit_move_multi_word (mode, x, y);
3366 /* Generate code to copy Y into X.
3367 Both Y and X must have the same mode, except that
3368 Y can be a constant with VOIDmode.
3369 This mode cannot be BLKmode; use emit_block_move for that.
3371 Return the last instruction emitted. */
3374 emit_move_insn (rtx x, rtx y)
3376 enum machine_mode mode = GET_MODE (x);
3377 rtx y_cst = NULL_RTX;
3380 gcc_assert (mode != BLKmode
3381 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3386 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3387 && (last_insn = compress_float_constant (x, y)))
3392 if (!LEGITIMATE_CONSTANT_P (y))
3394 y = force_const_mem (mode, y);
3396 /* If the target's cannot_force_const_mem prevented the spill,
3397 assume that the target's move expanders will also take care
3398 of the non-legitimate constant. */
3402 y = use_anchored_address (y);
3406 /* If X or Y are memory references, verify that their addresses are valid for the machine. */
3409 && (! memory_address_p (GET_MODE (x), XEXP (x, 0))
3410 && ! push_operand (x, GET_MODE (x))))
3411 x = validize_mem (x);
3414 && ! memory_address_p (GET_MODE (y), XEXP (y, 0)))
3415 y = validize_mem (y);
3417 gcc_assert (mode != BLKmode);
3419 last_insn = emit_move_insn_1 (x, y);
3421 if (y_cst && REG_P (x)
3422 && (set = single_set (last_insn)) != NULL_RTX
3423 && SET_DEST (set) == x
3424 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3425 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
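#if 0
/* Editor's usage sketch, not in the original file (guarded out):
   the canonical entry point.  emit_move_insn validates the operands
   and then lets emit_move_insn_1 pick a mov pattern or one of the
   complex/CC/multi-word fallbacks above.  */
static void
example_emit_move (void)
{
  rtx a = gen_reg_rtx (DImode);
  rtx b = gen_reg_rtx (DImode);
  emit_move_insn (a, b);
}
#endif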
3430 /* If Y is representable exactly in a narrower mode, and the target can
3431 perform the extension directly from constant or memory, then emit the
3432 move as an extension. */
3435 compress_float_constant (rtx x, rtx y)
3437 enum machine_mode dstmode = GET_MODE (x);
3438 enum machine_mode orig_srcmode = GET_MODE (y);
3439 enum machine_mode srcmode;
3441 int oldcost, newcost;
3443 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3445 if (LEGITIMATE_CONSTANT_P (y))
3446 oldcost = rtx_cost (y, SET);
3448 oldcost = rtx_cost (force_const_mem (dstmode, y), SET);
3450 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3451 srcmode != orig_srcmode;
3452 srcmode = GET_MODE_WIDER_MODE (srcmode))
3455 rtx trunc_y, last_insn;
3457 /* Skip if the target can't extend this way. */
3458 ic = can_extend_p (dstmode, srcmode, 0);
3459 if (ic == CODE_FOR_nothing)
3462 /* Skip if the narrowed value isn't exact. */
3463 if (! exact_real_truncate (srcmode, &r))
3466 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3468 if (LEGITIMATE_CONSTANT_P (trunc_y))
3470 /* Skip if the target needs extra instructions to perform the extension. */
3472 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3474 /* This is valid, but may not be cheaper than the original. */
3475 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3476 if (oldcost < newcost)
3479 else if (float_extend_from_mem[dstmode][srcmode])
3481 trunc_y = force_const_mem (srcmode, trunc_y);
3482 /* This is valid, but may not be cheaper than the original. */
3483 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3484 if (oldcost < newcost)
3486 trunc_y = validize_mem (trunc_y);
3491 /* For CSE's benefit, force the compressed constant pool entry
3492 into a new pseudo. This constant may be used in different modes,
3493 and if not, combine will put things back together for us. */
3494 trunc_y = force_reg (srcmode, trunc_y);
3495 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3496 last_insn = get_last_insn ();
3499 set_unique_reg_note (last_insn, REG_EQUAL, y);
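#if 0
/* Editor's worked example (illustrative, guarded out): moving the
   DFmode constant 1.0.  1.0 truncates exactly to SFmode, so when the
   target provides extendsfdf2 and the SFmode constant is cheaper
   (say, a smaller constant-pool entry), the move above is emitted as
   a FLOAT_EXTEND of the narrower constant instead.  */
#endif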
3507 /* Pushing data onto the stack. */
3509 /* Push a block of length SIZE (perhaps variable)
3510 and return an rtx to address the beginning of the block.
3511 The value may be virtual_outgoing_args_rtx.
3513 EXTRA is the number of bytes of padding to push in addition to SIZE.
3514 BELOW nonzero means this padding comes at low addresses;
3515 otherwise, the padding comes at high addresses. */
3518 push_block (rtx size, int extra, int below)
3522 size = convert_modes (Pmode, ptr_mode, size, 1);
3523 if (CONSTANT_P (size))
3524 anti_adjust_stack (plus_constant (size, extra));
3525 else if (REG_P (size) && extra == 0)
3526 anti_adjust_stack (size);
3529 temp = copy_to_mode_reg (Pmode, size);
3531 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3532 temp, 0, OPTAB_LIB_WIDEN);
3533 anti_adjust_stack (temp);
3536 #ifndef STACK_GROWS_DOWNWARD
3542 temp = virtual_outgoing_args_rtx;
3543 if (extra != 0 && below)
3544 temp = plus_constant (temp, extra);
3548 if (GET_CODE (size) == CONST_INT)
3549 temp = plus_constant (virtual_outgoing_args_rtx,
3550 -INTVAL (size) - (below ? 0 : extra));
3551 else if (extra != 0 && !below)
3552 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3553 negate_rtx (Pmode, plus_constant (size, extra)));
3555 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3556 negate_rtx (Pmode, size));
3559 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3562 #ifdef PUSH_ROUNDING
3564 /* Emit single push insn. */
3567 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3570 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3572 enum insn_code icode;
3573 insn_operand_predicate_fn pred;
3575 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3576 /* If there is a push pattern, use it. Otherwise try the old way of
3577 throwing a MEM representing a push operation at the move expander. */
3578 icode = optab_handler (push_optab, mode)->insn_code;
3579 if (icode != CODE_FOR_nothing)
3581 if (((pred = insn_data[(int) icode].operand[0].predicate)
3582 && !((*pred) (x, mode))))
3583 x = force_reg (mode, x);
3584 emit_insn (GEN_FCN (icode) (x));
3587 if (GET_MODE_SIZE (mode) == rounded_size)
3588 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3589 /* If we are to pad downward, adjust the stack pointer first and
3590 then store X into the stack location using an offset. This is
3591 because emit_move_insn does not know how to pad; it does not have access to type. */
3593 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3595 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3596 HOST_WIDE_INT offset;
3598 emit_move_insn (stack_pointer_rtx,
3599 expand_binop (Pmode,
3600 #ifdef STACK_GROWS_DOWNWARD
3606 GEN_INT (rounded_size),
3607 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3609 offset = (HOST_WIDE_INT) padding_size;
3610 #ifdef STACK_GROWS_DOWNWARD
3611 if (STACK_PUSH_CODE == POST_DEC)
3612 /* We have already decremented the stack pointer, so get the previous value. */
3614 offset += (HOST_WIDE_INT) rounded_size;
3616 if (STACK_PUSH_CODE == POST_INC)
3617 /* We have already incremented the stack pointer, so get the previous value. */
3619 offset -= (HOST_WIDE_INT) rounded_size;
3621 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3625 #ifdef STACK_GROWS_DOWNWARD
3626 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3627 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3628 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3630 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3631 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3632 GEN_INT (rounded_size));
3634 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3637 dest = gen_rtx_MEM (mode, dest_addr);
3641 set_mem_attributes (dest, type, 1);
3643 if (flag_optimize_sibling_calls)
3644 /* Function incoming arguments may overlap with sibling call
3645 outgoing arguments and we cannot allow reordering of reads
3646 from function arguments with stores to outgoing arguments
3647 of sibling calls. */
3648 set_mem_alias_set (dest, 0);
3650 emit_move_insn (dest, x);
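#if 0
/* Editor's worked example (illustrative, guarded out): pushing an
   HImode value when PUSH_ROUNDING rounds 2 bytes up to 4 on a
   downward-growing stack with downward padding.  padding_size == 2,
   so after the explicit 4-byte stack adjustment the value is stored
   at sp + 2, leaving the pad bytes below the data.  */
#endif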
3654 /* Generate code to push X onto the stack, assuming it has mode MODE and type TYPE.
3656 MODE is redundant except when X is a CONST_INT (since they don't carry mode info).
3658 SIZE is an rtx for the size of data to be copied (in bytes),
3659 needed only if X is BLKmode.
3661 ALIGN (in bits) is maximum alignment we can assume.
3663 If PARTIAL and REG are both nonzero, then copy that many of the first
3664 bytes of X into registers starting with REG, and push the rest of X.
3665 The amount of space pushed is decreased by PARTIAL bytes.
3666 REG must be a hard register in this case.
3667 If REG is zero but PARTIAL is not, take all other actions for an
3668 argument partially in registers, but do not actually load any registers.
3671 EXTRA is the amount in bytes of extra space to leave next to this arg.
3672 This is ignored if an argument block has already been allocated.
3674 On a machine that lacks real push insns, ARGS_ADDR is the address of
3675 the bottom of the argument block for this call. We use indexing off there
3676 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3677 argument block has not been preallocated.
3679 ARGS_SO_FAR is the size of args previously pushed for this call.
3681 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3682 for arguments passed in registers. If nonzero, it will be the number
3683 of bytes required. */
3686 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3687 unsigned int align, int partial, rtx reg, int extra,
3688 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3692 enum direction stack_direction
3693 #ifdef STACK_GROWS_DOWNWARD
3699 /* Decide where to pad the argument: `downward' for below,
3700 `upward' for above, or `none' for don't pad it.
3701 Default is below for small data on big-endian machines; else above. */
3702 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3704 /* Invert direction if stack is post-decrement. FIXME: why? */
3706 if (STACK_PUSH_CODE == POST_DEC)
3707 if (where_pad != none)
3708 where_pad = (where_pad == downward ? upward : downward);
3713 || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
3715 /* Copy a block into the stack, entirely or partially. */
3722 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3723 used = partial - offset;
3725 if (mode != BLKmode)
3727 /* A value is to be stored in an insufficiently aligned
3728 stack slot; copy via a suitably aligned slot if necessary. */
3730 size = GEN_INT (GET_MODE_SIZE (mode));
3731 if (!MEM_P (xinner))
3733 temp = assign_temp (type, 0, 1, 1);
3734 emit_move_insn (temp, xinner);
3741 /* USED is now the # of bytes we need not copy to the stack
3742 because registers will take care of them. */
3745 xinner = adjust_address (xinner, BLKmode, used);
3747 /* If the partial register-part of the arg counts in its stack size,
3748 skip the part of stack space corresponding to the registers.
3749 Otherwise, start copying to the beginning of the stack space,
3750 by setting SKIP to 0. */
3751 skip = (reg_parm_stack_space == 0) ? 0 : used;
3753 #ifdef PUSH_ROUNDING
3754 /* Do it with several push insns if that doesn't take lots of insns
3755 and if there is no difficulty with push insns that skip bytes
3756 on the stack for alignment purposes. */
3759 && GET_CODE (size) == CONST_INT
3761 && MEM_ALIGN (xinner) >= align
3762 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3763 /* Here we avoid the case of a structure whose weak alignment
3764 forces many pushes of a small amount of data,
3765 and such small pushes do rounding that causes trouble. */
3766 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3767 || align >= BIGGEST_ALIGNMENT
3768 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3769 == (align / BITS_PER_UNIT)))
3770 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3772 /* Push padding now if padding above and stack grows down,
3773 or if padding below and stack grows up.
3774 But if space already allocated, this has already been done. */
3775 if (extra && args_addr == 0
3776 && where_pad != none && where_pad != stack_direction)
3777 anti_adjust_stack (GEN_INT (extra));
3779 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3782 #endif /* PUSH_ROUNDING */
3786 /* Otherwise make space on the stack and copy the data
3787 to the address of that space. */
3789 /* Deduct words put into registers from the size we must copy. */
3792 if (GET_CODE (size) == CONST_INT)
3793 size = GEN_INT (INTVAL (size) - used);
3795 size = expand_binop (GET_MODE (size), sub_optab, size,
3796 GEN_INT (used), NULL_RTX, 0,
3800 /* Get the address of the stack space.
3801 In this case, we do not deal with EXTRA separately.
3802 A single stack adjust will do. */
3805 temp = push_block (size, extra, where_pad == downward);
3808 else if (GET_CODE (args_so_far) == CONST_INT)
3809 temp = memory_address (BLKmode,
3810 plus_constant (args_addr,
3811 skip + INTVAL (args_so_far)));
3813 temp = memory_address (BLKmode,
3814 plus_constant (gen_rtx_PLUS (Pmode,
3819 if (!ACCUMULATE_OUTGOING_ARGS)
3821 /* If the source is referenced relative to the stack pointer,
3822 copy it to another register to stabilize it. We do not need
3823 to do this if we know that we won't be changing sp. */
3825 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3826 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3827 temp = copy_to_reg (temp);
3830 target = gen_rtx_MEM (BLKmode, temp);
3832 /* We do *not* set_mem_attributes here, because incoming arguments
3833 may overlap with sibling call outgoing arguments and we cannot
3834 allow reordering of reads from function arguments with stores
3835 to outgoing arguments of sibling calls. We do, however, want
3836 to record the alignment of the stack slot. */
3837 /* ALIGN may well be better aligned than TYPE, e.g. due to
3838 PARM_BOUNDARY. Assume the caller isn't lying. */
3839 set_mem_align (target, align);
3841 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3844 else if (partial > 0)
3846 /* Scalar partly in registers. */
3848 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3851 /* # bytes of start of argument
3852 that we must make space for but need not store. */
3853 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3854 int args_offset = INTVAL (args_so_far);
3857 /* Push padding now if padding above and stack grows down,
3858 or if padding below and stack grows up.
3859 But if space already allocated, this has already been done. */
3860 if (extra && args_addr == 0
3861 && where_pad != none && where_pad != stack_direction)
3862 anti_adjust_stack (GEN_INT (extra));
3864 /* If we make space by pushing it, we might as well push
3865 the real data. Otherwise, we can leave OFFSET nonzero
3866 and leave the space uninitialized. */
3870 /* Now NOT_STACK gets the number of words that we don't need to
3871 allocate on the stack. Convert OFFSET to words too. */
3872 not_stack = (partial - offset) / UNITS_PER_WORD;
3873 offset /= UNITS_PER_WORD;
3875 /* If the partial register-part of the arg counts in its stack size,
3876 skip the part of stack space corresponding to the registers.
3877 Otherwise, start copying to the beginning of the stack space,
3878 by setting SKIP to 0. */
3879 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3881 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3882 x = validize_mem (force_const_mem (mode, x));
3884 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3885 SUBREGs of such registers are not allowed. */
3886 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3887 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3888 x = copy_to_reg (x);
3890 /* Loop over all the words allocated on the stack for this arg. */
3891 /* We can do it by words, because any scalar bigger than a word
3892 has a size a multiple of a word. */
3893 #ifndef PUSH_ARGS_REVERSED
3894 for (i = not_stack; i < size; i++)
3896 for (i = size - 1; i >= not_stack; i--)
3898 if (i >= not_stack + offset)
3899 emit_push_insn (operand_subword_force (x, i, mode),
3900 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3902 GEN_INT (args_offset + ((i - not_stack + skip)
3904 reg_parm_stack_space, alignment_pad);
3911 /* Push padding now if padding above and stack grows down,
3912 or if padding below and stack grows up.
3913 But if space already allocated, this has already been done. */
3914 if (extra && args_addr == 0
3915 && where_pad != none && where_pad != stack_direction)
3916 anti_adjust_stack (GEN_INT (extra));
3918 #ifdef PUSH_ROUNDING
3919 if (args_addr == 0 && PUSH_ARGS)
3920 emit_single_push_insn (mode, x, type);
3924 if (GET_CODE (args_so_far) == CONST_INT)
3926 = memory_address (mode,
3927 plus_constant (args_addr,
3928 INTVAL (args_so_far)));
3930 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3932 dest = gen_rtx_MEM (mode, addr);
3934 /* We do *not* set_mem_attributes here, because incoming arguments
3935 may overlap with sibling call outgoing arguments and we cannot
3936 allow reordering of reads from function arguments with stores
3937 to outgoing arguments of sibling calls. We do, however, want
3938 to record the alignment of the stack slot. */
3939 /* ALIGN may well be better aligned than TYPE, e.g. due to
3940 PARM_BOUNDARY. Assume the caller isn't lying. */
3941 set_mem_align (dest, align);
3943 emit_move_insn (dest, x);
3947 /* If part should go in registers, copy that part
3948 into the appropriate registers. Do this now, at the end,
3949 since mem-to-mem copies above may do function calls. */
3950 if (partial > 0 && reg != 0)
3952 /* Handle calls that pass values in multiple non-contiguous locations.
3953 The Irix 6 ABI has examples of this. */
3954 if (GET_CODE (reg) == PARALLEL)
3955 emit_group_load (reg, x, type, -1);
3958 gcc_assert (partial % UNITS_PER_WORD == 0);
3959 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
3963 if (extra && args_addr == 0 && where_pad == stack_direction)
3964 anti_adjust_stack (GEN_INT (extra));
3966 if (alignment_pad && args_addr == 0)
3967 anti_adjust_stack (alignment_pad);
3970 /* Return X if X can be used as a subtarget in a sequence of arithmetic operations. */
3974 get_subtarget (rtx x)
3978 /* Only registers can be subtargets. */
3980 /* Don't use hard regs to avoid extending their life. */
3981 || REGNO (x) < FIRST_PSEUDO_REGISTER
3985 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
3986 FIELD is a bitfield. Returns true if the optimization was successful,
3987 and there's nothing else to do. */
3990 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
3991 unsigned HOST_WIDE_INT bitpos,
3992 enum machine_mode mode1, rtx str_rtx,
3995 enum machine_mode str_mode = GET_MODE (str_rtx);
3996 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
4001 if (mode1 != VOIDmode
4002 || bitsize >= BITS_PER_WORD
4003 || str_bitsize > BITS_PER_WORD
4004 || TREE_SIDE_EFFECTS (to)
4005 || TREE_THIS_VOLATILE (to))
4009 if (!BINARY_CLASS_P (src)
4010 || TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
4013 op0 = TREE_OPERAND (src, 0);
4014 op1 = TREE_OPERAND (src, 1);
4017 if (!operand_equal_p (to, op0, 0))
4020 if (MEM_P (str_rtx))
4022 unsigned HOST_WIDE_INT offset1;
4024 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
4025 str_mode = word_mode;
4026 str_mode = get_best_mode (bitsize, bitpos,
4027 MEM_ALIGN (str_rtx), str_mode, 0);
4028 if (str_mode == VOIDmode)
4030 str_bitsize = GET_MODE_BITSIZE (str_mode);
4033 bitpos %= str_bitsize;
4034 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4035 str_rtx = adjust_address (str_rtx, str_mode, offset1);
4037 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
4040 /* If the bit field covers the whole REG/MEM, store_field
4041 will likely generate better code. */
4042 if (bitsize >= str_bitsize)
4045 /* We can't handle fields split across multiple entities. */
4046 if (bitpos + bitsize > str_bitsize)
4049 if (BYTES_BIG_ENDIAN)
4050 bitpos = str_bitsize - bitpos - bitsize;
4052 switch (TREE_CODE (src))
4056 /* For now, just optimize the case of the topmost bitfield
4057 where we don't need to do any masking and also
4058 1 bit bitfields where xor can be used.
4059 We might win by one instruction for the other bitfields
4060 too if insv/extv instructions aren't used, so that
4061 can be added later. */
4062 if (bitpos + bitsize != str_bitsize
4063 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4066 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4067 value = convert_modes (str_mode,
4068 TYPE_MODE (TREE_TYPE (op1)), value,
4069 TYPE_UNSIGNED (TREE_TYPE (op1)));
4071 /* We may be accessing data outside the field, which means
4072 we can alias adjacent data. */
4073 if (MEM_P (str_rtx))
4075 str_rtx = shallow_copy_rtx (str_rtx);
4076 set_mem_alias_set (str_rtx, 0);
4077 set_mem_expr (str_rtx, 0);
4080 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
4081 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
4083 value = expand_and (str_mode, value, const1_rtx, NULL);
4086 value = expand_shift (LSHIFT_EXPR, str_mode, value,
4087 build_int_cst (NULL_TREE, bitpos),
4089 result = expand_binop (str_mode, binop, str_rtx,
4090 value, str_rtx, 1, OPTAB_WIDEN);
4091 if (result != str_rtx)
4092 emit_move_insn (str_rtx, result);
4097 if (TREE_CODE (op1) != INTEGER_CST)
4099 value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), EXPAND_NORMAL);
4100 value = convert_modes (GET_MODE (str_rtx),
4101 TYPE_MODE (TREE_TYPE (op1)), value,
4102 TYPE_UNSIGNED (TREE_TYPE (op1)));
4104 /* We may be accessing data outside the field, which means
4105 we can alias adjacent data. */
4106 if (MEM_P (str_rtx))
4108 str_rtx = shallow_copy_rtx (str_rtx);
4109 set_mem_alias_set (str_rtx, 0);
4110 set_mem_expr (str_rtx, 0);
4113 binop = TREE_CODE (src) == BIT_IOR_EXPR ? ior_optab : xor_optab;
4114 if (bitpos + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
4116 rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize)
4118 value = expand_and (GET_MODE (str_rtx), value, mask,
4121 value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
4122 build_int_cst (NULL_TREE, bitpos),
4124 result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
4125 value, str_rtx, 1, OPTAB_WIDEN);
4126 if (result != str_rtx)
4127 emit_move_insn (str_rtx, result);
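#if 0
/* Editor's worked example (illustrative, guarded out): for a
   single-bit field, `s.f ^= 1' reaches the BIT_XOR_EXPR arm above
   and becomes a single xor of the containing word with a shifted
   mask, instead of an extract/modify/insert sequence.  */
#endif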
4138 /* Expand an assignment that stores the value of FROM into TO. If NONTEMPORAL
4139 is true, try generating a nontemporal store. */
4142 expand_assignment (tree to, tree from, bool nontemporal)
4147 /* Don't crash if the lhs of the assignment was erroneous. */
4148 if (TREE_CODE (to) == ERROR_MARK)
4150 result = expand_normal (from);
4154 /* Optimize away no-op moves without side-effects. */
4155 if (operand_equal_p (to, from, 0))
4158 /* Assignment of a structure component needs special treatment
4159 if the structure component's rtx is not simply a MEM.
4160 Assignment of an array element at a constant index, and assignment of
4161 an array element in an unaligned packed structure field, have the same problem. */
4163 if (handled_component_p (to)
4164 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4166 enum machine_mode mode1;
4167 HOST_WIDE_INT bitsize, bitpos;
4174 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4175 &unsignedp, &volatilep, true);
4177 /* If we are going to use store_bit_field and extract_bit_field,
4178 make sure to_rtx will be safe for multiple use. */
4180 to_rtx = expand_normal (tem);
4186 if (!MEM_P (to_rtx))
4188 /* We can get constant negative offsets into arrays with broken
4189 user code. Translate this to a trap instead of ICEing. */
4190 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4191 expand_builtin_trap ();
4192 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4195 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4196 #ifdef POINTERS_EXTEND_UNSIGNED
4197 if (GET_MODE (offset_rtx) != Pmode)
4198 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4200 if (GET_MODE (offset_rtx) != ptr_mode)
4201 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4204 /* A constant address in TO_RTX can have VOIDmode; we must not try
4205 to call force_reg in that case. */
4207 && GET_MODE (to_rtx) == BLKmode
4208 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4210 && (bitpos % bitsize) == 0
4211 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4212 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4214 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4218 to_rtx = offset_address (to_rtx, offset_rtx,
4219 highest_pow2_factor_for_target (to,
4223 /* Handle expand_expr of a complex value returning a CONCAT. */
4224 if (GET_CODE (to_rtx) == CONCAT)
4226 if (TREE_CODE (TREE_TYPE (from)) == COMPLEX_TYPE)
4228 gcc_assert (bitpos == 0);
4229 result = store_expr (from, to_rtx, false, nontemporal);
4233 gcc_assert (bitpos == 0 || bitpos == GET_MODE_BITSIZE (mode1));
4234 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
4242 /* If the field is at offset zero, we could have been given the
4243 DECL_RTX of the parent struct. Don't munge it. */
4244 to_rtx = shallow_copy_rtx (to_rtx);
4246 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4248 /* Deal with volatile and readonly fields. The former is only
4249 done for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4251 MEM_VOLATILE_P (to_rtx) = 1;
4252 if (component_uses_parent_alias_set (to))
4253 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4256 if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1,
4260 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4261 TREE_TYPE (tem), get_alias_set (to),
4266 preserve_temp_slots (result);
4272 /* If the rhs is a function call and its value is not an aggregate,
4273 call the function before we start to compute the lhs.
4274 This is needed for correct code for cases such as
4275 val = setjmp (buf) on machines where reference to val
4276 requires loading up part of an address in a separate insn.
4278 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4279 since it might be a promoted variable where the zero- or sign- extension
4280 needs to be done. Handling this in the normal way is safe because no
4281 computation is done before the call. */
4282 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
4283 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4284 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4285 && REG_P (DECL_RTL (to))))
4290 value = expand_normal (from);
4292 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4294 /* Handle calls that return values in multiple non-contiguous locations.
4295 The Irix 6 ABI has examples of this. */
4296 if (GET_CODE (to_rtx) == PARALLEL)
4297 emit_group_load (to_rtx, value, TREE_TYPE (from),
4298 int_size_in_bytes (TREE_TYPE (from)));
4299 else if (GET_MODE (to_rtx) == BLKmode)
4300 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4303 if (POINTER_TYPE_P (TREE_TYPE (to)))
4304 value = convert_memory_address (GET_MODE (to_rtx), value);
4305 emit_move_insn (to_rtx, value);
4307 preserve_temp_slots (to_rtx);
4313 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4314 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4317 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4319 /* Don't move directly into a return register. */
4320 if (TREE_CODE (to) == RESULT_DECL
4321 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
4326 temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
4328 if (GET_CODE (to_rtx) == PARALLEL)
4329 emit_group_load (to_rtx, temp, TREE_TYPE (from),
4330 int_size_in_bytes (TREE_TYPE (from)));
4332 emit_move_insn (to_rtx, temp);
4334 preserve_temp_slots (to_rtx);
4340 /* In case we are returning the contents of an object which overlaps
4341 the place the value is being stored, use a safe function when copying
4342 a value through a pointer into a structure value return block. */
4343 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4344 && cfun->returns_struct
4345 && !cfun->returns_pcc_struct)
4350 size = expr_size (from);
4351 from_rtx = expand_normal (from);
4353 emit_library_call (memmove_libfunc, LCT_NORMAL,
4354 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4355 XEXP (from_rtx, 0), Pmode,
4356 convert_to_mode (TYPE_MODE (sizetype),
4357 size, TYPE_UNSIGNED (sizetype)),
4358 TYPE_MODE (sizetype));
4360 preserve_temp_slots (to_rtx);
4366 /* Compute FROM and store the value in the rtx we got. */
4369 result = store_expr (from, to_rtx, 0, nontemporal);
4370 preserve_temp_slots (result);
4376 /* Emits nontemporal store insn that moves FROM to TO. Returns true if this
4377 succeeded, false otherwise. */
4380 emit_storent_insn (rtx to, rtx from)
4382 enum machine_mode mode = GET_MODE (to), imode;
4383 enum insn_code code = optab_handler (storent_optab, mode)->insn_code;
4386 if (code == CODE_FOR_nothing)
4389 imode = insn_data[code].operand[0].mode;
4390 if (!insn_data[code].operand[0].predicate (to, imode))
4393 imode = insn_data[code].operand[1].mode;
4394 if (!insn_data[code].operand[1].predicate (from, imode))
4396 from = copy_to_mode_reg (imode, from);
4397 if (!insn_data[code].operand[1].predicate (from, imode))
4401 pattern = GEN_FCN (code) (to, from);
4402 if (pattern == NULL_RTX)
4405 emit_insn (pattern);
4409 /* Generate code for computing expression EXP,
4410 and storing the value into TARGET.
4412 If the mode is BLKmode then we may return TARGET itself.
4413 It turns out that in BLKmode it doesn't cause a problem,
4414 because C has no operators that could combine two different
4415 assignments into the same BLKmode object with different values
4416 with no sequence point. Will other languages need this to be more thorough?
4419 If CALL_PARAM_P is nonzero, this is a store into a call param on the
4420 stack, and block moves may need to be treated specially.
4422 If NONTEMPORAL is true, try using a nontemporal store instruction. */
4425 store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
4428 rtx alt_rtl = NULL_RTX;
4429 int dont_return_target = 0;
4431 if (VOID_TYPE_P (TREE_TYPE (exp)))
4433 /* C++ can generate ?: expressions with a throw expression in one
4434 branch and an rvalue in the other. Here, we resolve attempts to
4435 store the throw expression's nonexistent result. */
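/* For example, in "x = cond ? throw E () : y;" the throw arm has void
type; there is no value to store, so the expression is expanded for
its side effects only. */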
4436 gcc_assert (!call_param_p);
4437 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
4440 if (TREE_CODE (exp) == COMPOUND_EXPR)
4442 /* Perform first part of compound expression, then assign from second part. */
4444 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4445 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4446 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
4449 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4451 /* For conditional expression, get safe form of the target. Then
4452 test the condition, doing the appropriate assignment on either
4453 side. This avoids the creation of unnecessary temporaries.
4454 For non-BLKmode, it is more efficient not to do this. */
4456 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4458 do_pending_stack_adjust ();
4460 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4461 store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
4463 emit_jump_insn (gen_jump (lab2));
4466 store_expr (TREE_OPERAND (exp, 2), target, call_param_p,
4473 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4474 /* If this is a scalar in a register that is stored in a wider mode
4475 than the declared mode, compute the result into its declared mode
4476 and then convert to the wider mode. Our value is the computed expression. */
4479 rtx inner_target = 0;
4481 /* We can do the conversion inside EXP, which will often result
4482 in some optimizations. Do the conversion in two steps: first
4483 change the signedness, if needed, then the extend. But don't
4484 do this if the type of EXP is a subtype of something else
4485 since then the conversion might involve more than just
4486 converting modes. */
4487 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
4488 && TREE_TYPE (TREE_TYPE (exp)) == 0
4489 && GET_MODE_PRECISION (GET_MODE (target))
4490 == TYPE_PRECISION (TREE_TYPE (exp)))
4492 if (TYPE_UNSIGNED (TREE_TYPE (exp))
4493 != SUBREG_PROMOTED_UNSIGNED_P (target))
4495 /* Some types, e.g. Fortran's logical*4, won't have a signed
4496 version, so use the mode instead. */
4498 = (signed_or_unsigned_type_for
4499 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)));
4501 ntype = lang_hooks.types.type_for_mode
4502 (TYPE_MODE (TREE_TYPE (exp)),
4503 SUBREG_PROMOTED_UNSIGNED_P (target));
4505 exp = fold_convert (ntype, exp);
4508 exp = fold_convert (lang_hooks.types.type_for_mode
4509 (GET_MODE (SUBREG_REG (target)),
4510 SUBREG_PROMOTED_UNSIGNED_P (target)),
4513 inner_target = SUBREG_REG (target);
4516 temp = expand_expr (exp, inner_target, VOIDmode,
4517 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4519 /* If TEMP is a VOIDmode constant, use convert_modes to make
4520 sure that we properly convert it. */
4521 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4523 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4524 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4525 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4526 GET_MODE (target), temp,
4527 SUBREG_PROMOTED_UNSIGNED_P (target));
4530 convert_move (SUBREG_REG (target), temp,
4531 SUBREG_PROMOTED_UNSIGNED_P (target));
4535 else if (TREE_CODE (exp) == STRING_CST
4536 && !nontemporal && !call_param_p
4537 && TREE_STRING_LENGTH (exp) > 0
4538 && TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
4540 /* Optimize initialization of an array with a STRING_CST. */
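/* For example, for "char buf[64] = "abc";" the bytes of the STRING_CST
are written with store_by_pieces and the remaining tail of BUF is
zeroed with clear_storage below. */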
4541 HOST_WIDE_INT exp_len, str_copy_len;
4544 exp_len = int_expr_size (exp);
4548 str_copy_len = strlen (TREE_STRING_POINTER (exp));
4549 if (str_copy_len < TREE_STRING_LENGTH (exp) - 1)
4552 str_copy_len = TREE_STRING_LENGTH (exp);
4553 if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0)
4555 str_copy_len += STORE_MAX_PIECES - 1;
4556 str_copy_len &= ~(STORE_MAX_PIECES - 1);
4558 str_copy_len = MIN (str_copy_len, exp_len);
4559 if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
4560 CONST_CAST(char *, TREE_STRING_POINTER (exp)),
4561 MEM_ALIGN (target), false))
4566 dest_mem = store_by_pieces (dest_mem,
4567 str_copy_len, builtin_strncpy_read_str,
4568 CONST_CAST(char *, TREE_STRING_POINTER (exp)),
4569 MEM_ALIGN (target), false,
4570 exp_len > str_copy_len ? 1 : 0);
4571 if (exp_len > str_copy_len)
4572 clear_storage (adjust_address (dest_mem, BLKmode, 0),
4573 GEN_INT (exp_len - str_copy_len),
4582 /* If we want to use a nontemporal store, force the value to a register first. */
4584 tmp_target = nontemporal ? NULL_RTX : target;
4585 temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
4587 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4589 /* Return TARGET if it's a specified hardware register.
4590 If TARGET is a volatile mem ref, either return TARGET
4591 or return a reg copied *from* TARGET; ANSI requires this.
4593 Otherwise, if TEMP is not TARGET, return TEMP
4594 if it is constant (for efficiency),
4595 or if we really want the correct value. */
4596 if (!(target && REG_P (target)
4597 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4598 && !(MEM_P (target) && MEM_VOLATILE_P (target))
4599 && ! rtx_equal_p (temp, target)
4600 && CONSTANT_P (temp))
4601 dont_return_target = 1;
4604 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4605 the same as that of TARGET, adjust the constant. This is needed, for
4606 example, in case it is a CONST_DOUBLE and we want only a word-sized value. */
4608 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4609 && TREE_CODE (exp) != ERROR_MARK
4610 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4611 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4612 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
4614 /* If value was not generated in the target, store it there.
4615 Convert the value to TARGET's type first if necessary and emit the
4616 pending incrementations that have been queued when expanding EXP.
4617 Note that we cannot emit the whole queue blindly because this will
4618 effectively disable the POST_INC optimization later.
4620 If TEMP and TARGET compare equal according to rtx_equal_p, but
4621 one or both of them are volatile memory refs, we have to distinguish
4623 - expand_expr has used TARGET. In this case, we must not generate
4624 another copy. This can be detected by TARGET being equal according to ==.
4626 - expand_expr has not used TARGET - that means that the source just
4627 happens to have the same RTX form. Since temp will have been created
4628 by expand_expr, it will compare unequal according to ==.
4629 We must generate a copy in this case, to reach the correct number
4630 of volatile memory references. */
4632 if ((! rtx_equal_p (temp, target)
4633 || (temp != target && (side_effects_p (temp)
4634 || side_effects_p (target))))
4635 && TREE_CODE (exp) != ERROR_MARK
4636 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4637 but TARGET is not a valid memory reference, TEMP will differ
4638 from TARGET although it is really the same location. */
4639 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4640 /* If there's nothing to copy, don't bother. Don't call
4641 expr_size unless necessary, because some front-ends' (C++)
4642 expr_size hooks must not be given objects that are not
4643 supposed to be bit-copied or bit-initialized. */
4644 && expr_size (exp) != const0_rtx)
4646 if (GET_MODE (temp) != GET_MODE (target)
4647 && GET_MODE (temp) != VOIDmode)
4649 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4650 if (dont_return_target)
4652 /* In this case, we will return TEMP,
4653 so make sure it has the proper mode.
4654 But don't forget to store the value into TARGET. */
4655 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4656 emit_move_insn (target, temp);
4658 else if (GET_MODE (target) == BLKmode
4659 || GET_MODE (temp) == BLKmode)
4660 emit_block_move (target, temp, expr_size (exp),
4662 ? BLOCK_OP_CALL_PARM
4663 : BLOCK_OP_NORMAL));
4665 convert_move (target, temp, unsignedp);
4668 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4670 /* Handle copying a string constant into an array. The string
4671 constant may be shorter than the array. So copy just the string's
4672 actual length, and clear the rest. First get the size of the data
4673 type of the string, which is actually the size of the target. */
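/* For example, for "char a[8] = "hi";" we block-copy the three bytes of
the STRING_CST (including the terminating nul) and then clear the
remaining five bytes of A. */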
4674 rtx size = expr_size (exp);
4676 if (GET_CODE (size) == CONST_INT
4677 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4678 emit_block_move (target, temp, size,
4680 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4683 /* Compute the size of the data to copy from the string. */
4685 = size_binop (MIN_EXPR,
4686 make_tree (sizetype, size),
4687 size_int (TREE_STRING_LENGTH (exp)));
4689 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4691 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4694 /* Copy that much. */
4695 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4696 TYPE_UNSIGNED (sizetype));
4697 emit_block_move (target, temp, copy_size_rtx,
4699 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4701 /* Figure out how much is left in TARGET that we have to clear.
4702 Do all calculations in ptr_mode. */
4703 if (GET_CODE (copy_size_rtx) == CONST_INT)
4705 size = plus_constant (size, -INTVAL (copy_size_rtx));
4706 target = adjust_address (target, BLKmode,
4707 INTVAL (copy_size_rtx));
4711 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4712 copy_size_rtx, NULL_RTX, 0,
4715 #ifdef POINTERS_EXTEND_UNSIGNED
4716 if (GET_MODE (copy_size_rtx) != Pmode)
4717 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4718 TYPE_UNSIGNED (sizetype));
4721 target = offset_address (target, copy_size_rtx,
4722 highest_pow2_factor (copy_size));
4723 label = gen_label_rtx ();
4724 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4725 GET_MODE (size), 0, label);
4728 if (size != const0_rtx)
4729 clear_storage (target, size, BLOCK_OP_NORMAL);
4735 /* Handle calls that return values in multiple non-contiguous locations.
4736 The Irix 6 ABI has examples of this. */
4737 else if (GET_CODE (target) == PARALLEL)
4738 emit_group_load (target, temp, TREE_TYPE (exp),
4739 int_size_in_bytes (TREE_TYPE (exp)));
4740 else if (GET_MODE (temp) == BLKmode)
4741 emit_block_move (target, temp, expr_size (exp),
4743 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4744 else if (nontemporal
4745 && emit_storent_insn (target, temp))
4746 /* If we managed to emit a nontemporal store, there is nothing else to do. */
4751 temp = force_operand (temp, target);
4753 emit_move_insn (target, temp);
4760 /* Helper for categorize_ctor_elements. Identical interface. */
4763 categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
4764 HOST_WIDE_INT *p_elt_count,
4767 unsigned HOST_WIDE_INT idx;
4768 HOST_WIDE_INT nz_elts, elt_count;
4769 tree value, purpose;
4771 /* Whether CTOR is a valid constant initializer, in accordance with what
4772 initializer_constant_valid_p does. If inferred from the constructor
4773 elements, true until proven otherwise. */
4774 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
4775 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
4780 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
4785 if (TREE_CODE (purpose) == RANGE_EXPR)
4787 tree lo_index = TREE_OPERAND (purpose, 0);
4788 tree hi_index = TREE_OPERAND (purpose, 1);
4790 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4791 mult = (tree_low_cst (hi_index, 1)
4792 - tree_low_cst (lo_index, 1) + 1);
4795 switch (TREE_CODE (value))
4799 HOST_WIDE_INT nz = 0, ic = 0;
4802 = categorize_ctor_elements_1 (value, &nz, &ic, p_must_clear);
4804 nz_elts += mult * nz;
4805 elt_count += mult * ic;
4807 if (const_from_elts_p && const_p)
4808 const_p = const_elt_p;
4815 if (!initializer_zerop (value))
4821 nz_elts += mult * TREE_STRING_LENGTH (value);
4822 elt_count += mult * TREE_STRING_LENGTH (value);
4826 if (!initializer_zerop (TREE_REALPART (value)))
4828 if (!initializer_zerop (TREE_IMAGPART (value)))
4836 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4838 if (!initializer_zerop (TREE_VALUE (v)))
4849 if (const_from_elts_p && const_p)
4850 const_p = initializer_constant_valid_p (value, TREE_TYPE (value))
4857 && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
4858 || TREE_CODE (TREE_TYPE (ctor)) == QUAL_UNION_TYPE))
4861 bool clear_this = true;
4863 if (!VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (ctor)))
4865 /* We don't expect more than one element of the union to be
4866 initialized. Not sure what we should do otherwise... */
4867 gcc_assert (VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ctor))
4870 init_sub_type = TREE_TYPE (VEC_index (constructor_elt,
4871 CONSTRUCTOR_ELTS (ctor),
4874 /* ??? We could look at each element of the union, and find the
4875 largest element, which would avoid comparing the size of the
4876 initialized element against any tail padding in the union.
4877 Doesn't seem worth the effort... */
4878 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (ctor)),
4879 TYPE_SIZE (init_sub_type)) == 1)
4881 /* And now we have to find out if the element itself is fully
4882 constructed. E.g. for union { struct { int a, b; } s; } u
4883 = { .s = { .a = 1 } }. */
4884 if (elt_count == count_type_elements (init_sub_type, false))
4889 *p_must_clear = clear_this;
4892 *p_nz_elts += nz_elts;
4893 *p_elt_count += elt_count;
4898 /* Examine CTOR to discover:
4899 * how many scalar fields are set to nonzero values,
4900 and place it in *P_NZ_ELTS;
4901 * how many scalar fields in total are in CTOR,
4902 and place it in *P_ELT_COUNT.
4903 * if a type is a union, and the initializer from the constructor
4904 is not the largest element in the union, then set *p_must_clear.
4906 Return whether or not CTOR is a valid static constant initializer, the same
4907 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
4910 categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
4911 HOST_WIDE_INT *p_elt_count,
4916 *p_must_clear = false;
4919 categorize_ctor_elements_1 (ctor, p_nz_elts, p_elt_count, p_must_clear);
4922 /* Count the number of scalars in TYPE. Return -1 on overflow or if TYPE
4923 is variable-sized. If ALLOW_FLEXARR is true, don't count a flexible
4924 array member at the end of the structure. */
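/* For example, "int a[4]" counts as 4 scalars, and
"struct { int x; int y[2]; }" counts as 3. */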
4927 count_type_elements (const_tree type, bool allow_flexarr)
4929 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
4930 switch (TREE_CODE (type))
4934 tree telts = array_type_nelts (type);
4935 if (telts && host_integerp (telts, 1))
4937 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
4938 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type), false);
4941 else if (max / n > m)
4949 HOST_WIDE_INT n = 0, t;
4952 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4953 if (TREE_CODE (f) == FIELD_DECL)
4955 t = count_type_elements (TREE_TYPE (f), false);
4958 /* Check for structures with a flexible array member. */
4959 tree tf = TREE_TYPE (f);
4961 && TREE_CHAIN (f) == NULL
4962 && TREE_CODE (tf) == ARRAY_TYPE
4964 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
4965 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
4966 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
4967 && int_size_in_bytes (type) >= 0)
4979 case QUAL_UNION_TYPE:
4986 return TYPE_VECTOR_SUBPARTS (type);
4990 case FIXED_POINT_TYPE:
4995 case REFERENCE_TYPE:
5007 /* Return 1 if EXP contains mostly (at least 3/4) zeros. */
5010 mostly_zeros_p (const_tree exp)
5012 if (TREE_CODE (exp) == CONSTRUCTOR)
5015 HOST_WIDE_INT nz_elts, count, elts;
5018 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
5022 elts = count_type_elements (TREE_TYPE (exp), false);
5024 return nz_elts < elts / 4;
5027 return initializer_zerop (exp);
5030 /* Return 1 if EXP contains all zeros. */
5033 all_zeros_p (const_tree exp)
5035 if (TREE_CODE (exp) == CONSTRUCTOR)
5038 HOST_WIDE_INT nz_elts, count;
5041 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
5042 return nz_elts == 0;
5045 return initializer_zerop (exp);
5048 /* Helper function for store_constructor.
5049 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
5050 TYPE is the type of the CONSTRUCTOR, not the element type.
5051 CLEARED is as for store_constructor.
5052 ALIAS_SET is the alias set to use for any stores.
5054 This provides a recursive shortcut back to store_constructor when it isn't
5055 necessary to go through store_field. This is so that we can pass through
5056 the cleared field to let store_constructor know that we may not have to
5057 clear a substructure if the outer structure has already been cleared. */
5060 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
5061 HOST_WIDE_INT bitpos, enum machine_mode mode,
5062 tree exp, tree type, int cleared,
5063 alias_set_type alias_set)
5065 if (TREE_CODE (exp) == CONSTRUCTOR
5066 /* We can only call store_constructor recursively if the size and
5067 bit position are on a byte boundary. */
5068 && bitpos % BITS_PER_UNIT == 0
5069 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
5070 /* If we have a nonzero bitpos for a register target, then we just
5071 let store_field do the bitfield handling. This is unlikely to
5072 generate unnecessary clear instructions anyway. */
5073 && (bitpos == 0 || MEM_P (target)))
5077 = adjust_address (target,
5078 GET_MODE (target) == BLKmode
5080 % GET_MODE_ALIGNMENT (GET_MODE (target)))
5081 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
5084 /* Update the alias set, if required. */
5085 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
5086 && MEM_ALIAS_SET (target) != 0)
5088 target = copy_rtx (target);
5089 set_mem_alias_set (target, alias_set);
5092 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
5095 store_field (target, bitsize, bitpos, mode, exp, type, alias_set, false);
5098 /* Store the value of constructor EXP into the rtx TARGET.
5099 TARGET is either a REG or a MEM; we know it cannot conflict, since
5100 safe_from_p has been called.
5101 CLEARED is true if TARGET is known to have been zero'd.
5102 SIZE is the number of bytes of TARGET we are allowed to modify: this
5103 may not be the same as the size of EXP if we are assigning to a field
5104 which has been packed to exclude padding bits. */
5107 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
5109 tree type = TREE_TYPE (exp);
5110 #ifdef WORD_REGISTER_OPERATIONS
5111 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
5114 switch (TREE_CODE (type))
5118 case QUAL_UNION_TYPE:
5120 unsigned HOST_WIDE_INT idx;
5123 /* If size is zero or the target is already cleared, do nothing. */
5124 if (size == 0 || cleared)
5126 /* We either clear the aggregate or indicate the value is dead. */
5127 else if ((TREE_CODE (type) == UNION_TYPE
5128 || TREE_CODE (type) == QUAL_UNION_TYPE)
5129 && ! CONSTRUCTOR_ELTS (exp))
5130 /* If the constructor is empty, clear the union. */
5132 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
5136 /* If we are building a static constructor into a register,
5137 set the initial value as zero so we can fold the value into
5138 a constant. But if more than one register is involved,
5139 this probably loses. */
5140 else if (REG_P (target) && TREE_STATIC (exp)
5141 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
5143 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5147 /* If the constructor has fewer fields than the structure or
5148 if we are initializing the structure to mostly zeros, clear
5149 the whole structure first. Don't do this if TARGET is a
5150 register whose mode size isn't equal to SIZE since
5151 clear_storage can't handle this case. */
5153 && (((int)VEC_length (constructor_elt, CONSTRUCTOR_ELTS (exp))
5154 != fields_length (type))
5155 || mostly_zeros_p (exp))
5157 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
5160 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5164 if (REG_P (target) && !cleared)
5165 emit_clobber (target);
5167 /* Store each element of the constructor into the
5168 corresponding field of TARGET. */
5169 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
5171 enum machine_mode mode;
5172 HOST_WIDE_INT bitsize;
5173 HOST_WIDE_INT bitpos = 0;
5175 rtx to_rtx = target;
5177 /* Just ignore missing fields. We cleared the whole
5178 structure, above, if any fields are missing. */
5182 if (cleared && initializer_zerop (value))
5185 if (host_integerp (DECL_SIZE (field), 1))
5186 bitsize = tree_low_cst (DECL_SIZE (field), 1);
5190 mode = DECL_MODE (field);
5191 if (DECL_BIT_FIELD (field))
5194 offset = DECL_FIELD_OFFSET (field);
5195 if (host_integerp (offset, 0)
5196 && host_integerp (bit_position (field), 0))
5198 bitpos = int_bit_position (field);
5202 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
5209 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
5210 make_tree (TREE_TYPE (exp),
5213 offset_rtx = expand_normal (offset);
5214 gcc_assert (MEM_P (to_rtx));
5216 #ifdef POINTERS_EXTEND_UNSIGNED
5217 if (GET_MODE (offset_rtx) != Pmode)
5218 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
5220 if (GET_MODE (offset_rtx) != ptr_mode)
5221 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
5224 to_rtx = offset_address (to_rtx, offset_rtx,
5225 highest_pow2_factor (offset));
5228 #ifdef WORD_REGISTER_OPERATIONS
5229 /* If this initializes a field that is smaller than a
5230 word, at the start of a word, try to widen it to a full
5231 word. This special case allows us to output C++ member
5232 function initializations in a form that the optimizers can understand. */
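/* For example, on a 32-bit target a constant store into a 16-bit field
at bit offset 0 is widened to a full-word store of the constant,
shifted left by 16 on a big-endian machine so that it still lands in
the correct bytes. */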
5235 && bitsize < BITS_PER_WORD
5236 && bitpos % BITS_PER_WORD == 0
5237 && GET_MODE_CLASS (mode) == MODE_INT
5238 && TREE_CODE (value) == INTEGER_CST
5240 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5242 tree type = TREE_TYPE (value);
5244 if (TYPE_PRECISION (type) < BITS_PER_WORD)
5246 type = lang_hooks.types.type_for_size
5247 (BITS_PER_WORD, TYPE_UNSIGNED (type));
5248 value = fold_convert (type, value);
5251 if (BYTES_BIG_ENDIAN)
5253 = fold_build2 (LSHIFT_EXPR, type, value,
5254 build_int_cst (type,
5255 BITS_PER_WORD - bitsize));
5256 bitsize = BITS_PER_WORD;
5261 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5262 && DECL_NONADDRESSABLE_P (field))
5264 to_rtx = copy_rtx (to_rtx);
5265 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5268 store_constructor_field (to_rtx, bitsize, bitpos, mode,
5269 value, type, cleared,
5270 get_alias_set (TREE_TYPE (field)));
5277 unsigned HOST_WIDE_INT i;
5280 tree elttype = TREE_TYPE (type);
5282 HOST_WIDE_INT minelt = 0;
5283 HOST_WIDE_INT maxelt = 0;
5285 domain = TYPE_DOMAIN (type);
5286 const_bounds_p = (TYPE_MIN_VALUE (domain)
5287 && TYPE_MAX_VALUE (domain)
5288 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5289 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5291 /* If we have constant bounds for the range of the type, get them. */
5294 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5295 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5298 /* If the constructor has fewer elements than the array, clear
5299 the whole array first. Similarly if this is a static
5300 constructor of a non-BLKmode object. */
5303 else if (REG_P (target) && TREE_STATIC (exp))
5307 unsigned HOST_WIDE_INT idx;
5309 HOST_WIDE_INT count = 0, zero_count = 0;
5310 need_to_clear = ! const_bounds_p;
5312 /* This loop is a more accurate version of the loop in
5313 mostly_zeros_p (it handles RANGE_EXPR in an index). It
5314 is also needed to check for missing elements. */
5315 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
5317 HOST_WIDE_INT this_node_count;
5322 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5324 tree lo_index = TREE_OPERAND (index, 0);
5325 tree hi_index = TREE_OPERAND (index, 1);
5327 if (! host_integerp (lo_index, 1)
5328 || ! host_integerp (hi_index, 1))
5334 this_node_count = (tree_low_cst (hi_index, 1)
5335 - tree_low_cst (lo_index, 1) + 1);
5338 this_node_count = 1;
5340 count += this_node_count;
5341 if (mostly_zeros_p (value))
5342 zero_count += this_node_count;
5345 /* Clear the entire array first if there are any missing
5346 elements, or if the incidence of zero elements is >= 75%. */
5349 && (count < maxelt - minelt + 1
5350 || 4 * zero_count >= 3 * count))
5354 if (need_to_clear && size > 0)
5357 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5359 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5363 if (!cleared && REG_P (target))
5364 /* Inform later passes that the old value is dead. */
5365 emit_clobber (target);
5367 /* Store each element of the constructor into the
5368 corresponding element of TARGET, determined by counting the
5370 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
5372 enum machine_mode mode;
5373 HOST_WIDE_INT bitsize;
5374 HOST_WIDE_INT bitpos;
5376 rtx xtarget = target;
5378 if (cleared && initializer_zerop (value))
5381 unsignedp = TYPE_UNSIGNED (elttype);
5382 mode = TYPE_MODE (elttype);
5383 if (mode == BLKmode)
5384 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5385 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5388 bitsize = GET_MODE_BITSIZE (mode);
5390 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5392 tree lo_index = TREE_OPERAND (index, 0);
5393 tree hi_index = TREE_OPERAND (index, 1);
5394 rtx index_r, pos_rtx;
5395 HOST_WIDE_INT lo, hi, count;
5398 /* If the range is constant and "small", unroll the loop. */
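/* For example, the GNU C range designator "int a[8] = { [2 ... 5] = 7 };"
produces a RANGE_EXPR index; for such a small constant range the
element stores are emitted directly instead of emitting a loop. */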
5400 && host_integerp (lo_index, 0)
5401 && host_integerp (hi_index, 0)
5402 && (lo = tree_low_cst (lo_index, 0),
5403 hi = tree_low_cst (hi_index, 0),
5404 count = hi - lo + 1,
5407 || (host_integerp (TYPE_SIZE (elttype), 1)
5408 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5411 lo -= minelt; hi -= minelt;
5412 for (; lo <= hi; lo++)
5414 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5417 && !MEM_KEEP_ALIAS_SET_P (target)
5418 && TREE_CODE (type) == ARRAY_TYPE
5419 && TYPE_NONALIASED_COMPONENT (type))
5421 target = copy_rtx (target);
5422 MEM_KEEP_ALIAS_SET_P (target) = 1;
5425 store_constructor_field
5426 (target, bitsize, bitpos, mode, value, type, cleared,
5427 get_alias_set (elttype));
5432 rtx loop_start = gen_label_rtx ();
5433 rtx loop_end = gen_label_rtx ();
5436 expand_normal (hi_index);
5437 unsignedp = TYPE_UNSIGNED (domain);
5439 index = build_decl (VAR_DECL, NULL_TREE, domain);
5442 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5444 SET_DECL_RTL (index, index_r);
5445 store_expr (lo_index, index_r, 0, false);
5447 /* Build the head of the loop. */
5448 do_pending_stack_adjust ();
5449 emit_label (loop_start);
5451 /* Assign value to element index. */
5453 fold_convert (ssizetype,
5454 fold_build2 (MINUS_EXPR,
5457 TYPE_MIN_VALUE (domain)));
5460 size_binop (MULT_EXPR, position,
5461 fold_convert (ssizetype,
5462 TYPE_SIZE_UNIT (elttype)));
5464 pos_rtx = expand_normal (position);
5465 xtarget = offset_address (target, pos_rtx,
5466 highest_pow2_factor (position));
5467 xtarget = adjust_address (xtarget, mode, 0);
5468 if (TREE_CODE (value) == CONSTRUCTOR)
5469 store_constructor (value, xtarget, cleared,
5470 bitsize / BITS_PER_UNIT);
5472 store_expr (value, xtarget, 0, false);
5474 /* Generate a conditional jump to exit the loop. */
5475 exit_cond = build2 (LT_EXPR, integer_type_node,
5477 jumpif (exit_cond, loop_end);
5479 /* Update the loop counter, and jump to the head of the loop. */
5481 expand_assignment (index,
5482 build2 (PLUS_EXPR, TREE_TYPE (index),
5483 index, integer_one_node),
5486 emit_jump (loop_start);
5488 /* Build the end of the loop. */
5489 emit_label (loop_end);
5492 else if ((index != 0 && ! host_integerp (index, 0))
5493 || ! host_integerp (TYPE_SIZE (elttype), 1))
5498 index = ssize_int (1);
5501 index = fold_convert (ssizetype,
5502 fold_build2 (MINUS_EXPR,
5505 TYPE_MIN_VALUE (domain)));
5508 size_binop (MULT_EXPR, index,
5509 fold_convert (ssizetype,
5510 TYPE_SIZE_UNIT (elttype)));
5511 xtarget = offset_address (target,
5512 expand_normal (position),
5513 highest_pow2_factor (position));
5514 xtarget = adjust_address (xtarget, mode, 0);
5515 store_expr (value, xtarget, 0, false);
5520 bitpos = ((tree_low_cst (index, 0) - minelt)
5521 * tree_low_cst (TYPE_SIZE (elttype), 1));
5523 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5525 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
5526 && TREE_CODE (type) == ARRAY_TYPE
5527 && TYPE_NONALIASED_COMPONENT (type))
5529 target = copy_rtx (target);
5530 MEM_KEEP_ALIAS_SET_P (target) = 1;
5532 store_constructor_field (target, bitsize, bitpos, mode, value,
5533 type, cleared, get_alias_set (elttype));
5541 unsigned HOST_WIDE_INT idx;
5542 constructor_elt *ce;
5546 tree elttype = TREE_TYPE (type);
5547 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
5548 enum machine_mode eltmode = TYPE_MODE (elttype);
5549 HOST_WIDE_INT bitsize;
5550 HOST_WIDE_INT bitpos;
5551 rtvec vector = NULL;
5554 gcc_assert (eltmode != BLKmode);
5556 n_elts = TYPE_VECTOR_SUBPARTS (type);
5557 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
5559 enum machine_mode mode = GET_MODE (target);
5561 icode = (int) optab_handler (vec_init_optab, mode)->insn_code;
5562 if (icode != CODE_FOR_nothing)
5566 vector = rtvec_alloc (n_elts);
5567 for (i = 0; i < n_elts; i++)
5568 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
5572 /* If the constructor has fewer elements than the vector,
5573 clear the whole vector first. Similarly if this is a static
5574 constructor of a non-BLKmode object. */
5577 else if (REG_P (target) && TREE_STATIC (exp))
5581 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
5584 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
5586 int n_elts_here = tree_low_cst
5587 (int_const_binop (TRUNC_DIV_EXPR,
5588 TYPE_SIZE (TREE_TYPE (value)),
5589 TYPE_SIZE (elttype), 0), 1);
5591 count += n_elts_here;
5592 if (mostly_zeros_p (value))
5593 zero_count += n_elts_here;
5596 /* Clear the entire vector first if there are any missing elements,
5597 or if the incidence of zero elements is >= 75%. */
5598 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
5601 if (need_to_clear && size > 0 && !vector)
5604 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5606 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5610 /* Inform later passes that the old value is dead. */
5611 if (!cleared && !vector && REG_P (target))
5612 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5614 /* Store each element of the constructor into the corresponding
5615 element of TARGET, determined by counting the elements. */
5616 for (idx = 0, i = 0;
5617 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
5618 idx++, i += bitsize / elt_size)
5620 HOST_WIDE_INT eltpos;
5621 tree value = ce->value;
5623 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
5624 if (cleared && initializer_zerop (value))
5628 eltpos = tree_low_cst (ce->index, 1);
5634 /* Vector CONSTRUCTORs should only be built from smaller
5635 vectors in the case of BLKmode vectors. */
5636 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
5637 RTVEC_ELT (vector, eltpos)
5638 = expand_normal (value);
5642 enum machine_mode value_mode =
5643 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
5644 ? TYPE_MODE (TREE_TYPE (value))
5646 bitpos = eltpos * elt_size;
5647 store_constructor_field (target, bitsize, bitpos,
5648 value_mode, value, type,
5649 cleared, get_alias_set (elttype));
5654 emit_insn (GEN_FCN (icode)
5656 gen_rtx_PARALLEL (GET_MODE (target), vector)));
5665 /* Store the value of EXP (an expression tree)
5666 into a subfield of TARGET which has mode MODE and occupies
5667 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5668 If MODE is VOIDmode, it means that we are storing into a bit-field.
5670 Always return const0_rtx unless we have something particular to return.
5673 TYPE is the type of the underlying object,
5675 ALIAS_SET is the alias set for the destination. This value will
5676 (in general) be different from that for TARGET, since TARGET is a
5677 reference to the containing structure.
5679 If NONTEMPORAL is true, try generating a nontemporal store. */
5682 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5683 enum machine_mode mode, tree exp, tree type,
5684 alias_set_type alias_set, bool nontemporal)
5686 HOST_WIDE_INT width_mask = 0;
5688 if (TREE_CODE (exp) == ERROR_MARK)
5691 /* If we have nothing to store, do nothing unless the expression has side-effects. */
5694 return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5695 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5696 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5698 /* If we are storing into an unaligned field of an aligned union that is
5699 in a register, we may have the mode of TARGET being an integer mode but
5700 MODE == BLKmode. In that case, get an aligned object whose size and
5701 alignment are the same as TARGET and store TARGET into it (we can avoid
5702 the store if the field being stored is the entire width of TARGET). Then
5703 call ourselves recursively to store the field into a BLKmode version of
5704 that object. Finally, load from the object into TARGET. This is not
5705 very efficient in general, but should only be slightly more expensive
5706 than the otherwise-required unaligned accesses. Perhaps this can be
5707 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5708 twice, once with emit_move_insn and once via store_field. */
5711 && (REG_P (target) || GET_CODE (target) == SUBREG))
5713 rtx object = assign_temp (type, 0, 1, 1);
5714 rtx blk_object = adjust_address (object, BLKmode, 0);
5716 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5717 emit_move_insn (object, target);
5719 store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set,
5722 emit_move_insn (target, object);
5724 /* We want to return the BLKmode version of the data. */
5728 if (GET_CODE (target) == CONCAT)
5730 /* We're storing into a struct containing a single __complex. */
5732 gcc_assert (!bitpos);
5733 return store_expr (exp, target, 0, nontemporal);
5736 /* If the structure is in a register or if the component
5737 is a bit field, we cannot use addressing to access it.
5738 Use bit-field techniques or SUBREG to store in it. */
5740 if (mode == VOIDmode
5741 || (mode != BLKmode && ! direct_store[(int) mode]
5742 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5743 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5745 || GET_CODE (target) == SUBREG
5746 /* If the field isn't aligned enough to store as an ordinary memref,
5747 store it as a bit field. */
5749 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5750 || bitpos % GET_MODE_ALIGNMENT (mode))
5751 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5752 || (bitpos % BITS_PER_UNIT != 0)))
5753 /* If the RHS and field are a constant size and the size of the
5754 RHS isn't the same size as the bitfield, we must use bitfield operations. */
5757 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5758 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5762 /* If EXP is a NOP_EXPR of precision less than its mode, then that
5763 implies a mask operation. If the precision is the same size as
5764 the field we're storing into, that mask is redundant. This is
5765 particularly common with bit field assignments generated by the C front end. */
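/* For example, in "s.f = x" where F is a 5-bit field, the front end
narrows X to a 5-bit type; since store_bit_field below writes exactly
BITSIZE bits, the masking implied by that narrowing is redundant and
we can expand the unconverted operand instead. */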
5767 if (TREE_CODE (exp) == NOP_EXPR)
5769 tree type = TREE_TYPE (exp);
5770 if (INTEGRAL_TYPE_P (type)
5771 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
5772 && bitsize == TYPE_PRECISION (type))
5774 type = TREE_TYPE (TREE_OPERAND (exp, 0));
5775 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
5776 exp = TREE_OPERAND (exp, 0);
5780 temp = expand_normal (exp);
5782 /* If BITSIZE is narrower than the size of the type of EXP
5783 we will be narrowing TEMP. Normally, what's wanted are the
5784 low-order bits. However, if EXP's type is a record and this is
5785 a big-endian machine, we want the upper BITSIZE bits. */
5786 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5787 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5788 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5789 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5790 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5794 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE. */
5796 if (mode != VOIDmode && mode != BLKmode
5797 && mode != TYPE_MODE (TREE_TYPE (exp)))
5798 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5800 /* If the modes of TEMP and TARGET are both BLKmode, both
5801 must be in memory and BITPOS must be aligned on a byte
5802 boundary. If so, we simply do a block copy. Likewise
5803 for a BLKmode-like TARGET. */
5804 if (GET_MODE (temp) == BLKmode
5805 && (GET_MODE (target) == BLKmode
5807 && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
5808 && (bitpos % BITS_PER_UNIT) == 0
5809 && (bitsize % BITS_PER_UNIT) == 0)))
5811 gcc_assert (MEM_P (target) && MEM_P (temp)
5812 && (bitpos % BITS_PER_UNIT) == 0);
5814 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5815 emit_block_move (target, temp,
5816 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5823 /* Store the value in the bitfield. */
5824 store_bit_field (target, bitsize, bitpos, mode, temp);
5830 /* Now build a reference to just the desired component. */
5831 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5833 if (to_rtx == target)
5834 to_rtx = copy_rtx (to_rtx);
5836 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5837 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5838 set_mem_alias_set (to_rtx, alias_set);
5840 return store_expr (exp, to_rtx, 0, nontemporal);
5844 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5845 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5846 codes and find the ultimate containing object, which we return.
5848 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5849 bit position, and *PUNSIGNEDP to the signedness of the field.
5850 If the position of the field is variable, we store a tree
5851 giving the variable offset (in units) in *POFFSET.
5852 This offset is in addition to the bit position.
5853 If the position is not variable, we store 0 in *POFFSET.
5855 If any of the extraction expressions is volatile,
5856 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5858 If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
5859 Otherwise, it is a mode that can be used to access the field.
5861 If the field describes a variable-sized object, *PMODE is set to
5862 BLKmode and *PBITSIZE is set to -1. An access cannot be made in
5863 this case, but the address of the object can be found.
5865 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
5866 look through nodes that serve as markers of a greater alignment than
5867 the one that can be deduced from the expression. These nodes make it
5868 possible for front-ends to prevent temporaries from being created by
5869 the middle-end on alignment considerations. For that purpose, the
5870 normal operating mode at high-level is to always pass FALSE so that
5871 the ultimate containing object is really returned; moreover, the
5872 associated predicate handled_component_p will always return TRUE
5873 on these nodes, thus indicating that they are essentially handled
5874 by get_inner_reference. TRUE should only be passed when the caller
5875 is scanning the expression in order to build another representation
5876 and specifically knows how to handle these nodes; as such, this is
5877 the normal operating mode in the RTL expanders. */
5880 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5881 HOST_WIDE_INT *pbitpos, tree *poffset,
5882 enum machine_mode *pmode, int *punsignedp,
5883 int *pvolatilep, bool keep_aligning)
5886 enum machine_mode mode = VOIDmode;
5887 bool blkmode_bitfield = false;
5888 tree offset = size_zero_node;
5889 tree bit_offset = bitsize_zero_node;
5891 /* First get the mode, signedness, and size. We do this from just the
5892 outermost expression. */
5893 if (TREE_CODE (exp) == COMPONENT_REF)
5895 tree field = TREE_OPERAND (exp, 1);
5896 size_tree = DECL_SIZE (field);
5897 if (!DECL_BIT_FIELD (field))
5898 mode = DECL_MODE (field);
5899 else if (DECL_MODE (field) == BLKmode)
5900 blkmode_bitfield = true;
5902 *punsignedp = DECL_UNSIGNED (field);
5904 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5906 size_tree = TREE_OPERAND (exp, 1);
5907 *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
5908 || TYPE_UNSIGNED (TREE_TYPE (exp)));
5910 /* For vector types, with the correct size of access, use the mode of the inner type. */
5912 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
5913 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
5914 && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
5915 mode = TYPE_MODE (TREE_TYPE (exp));
5919 mode = TYPE_MODE (TREE_TYPE (exp));
5920 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5922 if (mode == BLKmode)
5923 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5925 *pbitsize = GET_MODE_BITSIZE (mode);
5930 if (! host_integerp (size_tree, 1))
5931 mode = BLKmode, *pbitsize = -1;
5933 *pbitsize = tree_low_cst (size_tree, 1);
5936 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5937 and find the ultimate containing object. */
5940 switch (TREE_CODE (exp))
5943 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5944 TREE_OPERAND (exp, 2));
5949 tree field = TREE_OPERAND (exp, 1);
5950 tree this_offset = component_ref_field_offset (exp);
5952 /* If this field hasn't been filled in yet, don't go past it.
5953 This should only happen when folding expressions made during
5954 type construction. */
5955 if (this_offset == 0)
5958 offset = size_binop (PLUS_EXPR, offset, this_offset);
5959 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5960 DECL_FIELD_BIT_OFFSET (field));
5962 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5967 case ARRAY_RANGE_REF:
5969 tree index = TREE_OPERAND (exp, 1);
5970 tree low_bound = array_ref_low_bound (exp);
5971 tree unit_size = array_ref_element_size (exp);
5973 /* We assume all arrays have sizes that are a multiple of a byte.
5974 First subtract the lower bound, if any, in the type of the
5975 index, then convert to sizetype and multiply by the size of
5976 the array element. */
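/* For example, for an array whose lower bound is 1 (as in Fortran or
Ada) with 4-byte elements, the byte offset of element I is computed
as (I - 1) * 4. */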
5977 if (! integer_zerop (low_bound))
5978 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
5981 offset = size_binop (PLUS_EXPR, offset,
5982 size_binop (MULT_EXPR,
5983 fold_convert (sizetype, index),
5992 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5993 bitsize_int (*pbitsize));
5996 case VIEW_CONVERT_EXPR:
5997 if (keep_aligning && STRICT_ALIGNMENT
5998 && (TYPE_ALIGN (TREE_TYPE (exp))
5999 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
6000 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
6001 < BIGGEST_ALIGNMENT)
6002 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
6003 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6011 /* If any reference in the chain is volatile, the effect is volatile. */
6012 if (TREE_THIS_VOLATILE (exp))
6015 exp = TREE_OPERAND (exp, 0);
6019 /* If OFFSET is constant, see if we can return the whole thing as a
6020 constant bit position. Make sure to handle overflow during this conversion. */
6022 if (host_integerp (offset, 0))
6024 double_int tem = double_int_mul (tree_to_double_int (offset),
6025 uhwi_to_double_int (BITS_PER_UNIT));
6026 tem = double_int_add (tem, tree_to_double_int (bit_offset));
6027 if (double_int_fits_in_shwi_p (tem))
6029 *pbitpos = double_int_to_shwi (tem);
6030 *poffset = offset = NULL_TREE;
6034 /* Otherwise, split it up. */
6037 *pbitpos = tree_low_cst (bit_offset, 0);
6041 /* We can use BLKmode for a byte-aligned BLKmode bitfield. */
6042 if (mode == VOIDmode
6044 && (*pbitpos % BITS_PER_UNIT) == 0
6045 && (*pbitsize % BITS_PER_UNIT) == 0)
6053 /* Given an expression EXP that may be a COMPONENT_REF or an ARRAY_REF,
6054 look for whether EXP or any nested component-refs within EXP is marked as PACKED. */
6058 contains_packed_reference (const_tree exp)
6060 bool packed_p = false;
6064 switch (TREE_CODE (exp))
6068 tree field = TREE_OPERAND (exp, 1);
6069 packed_p = DECL_PACKED (field)
6070 || TYPE_PACKED (TREE_TYPE (field))
6071 || TYPE_PACKED (TREE_TYPE (exp));
6079 case ARRAY_RANGE_REF:
6082 case VIEW_CONVERT_EXPR:
6088 exp = TREE_OPERAND (exp, 0);
6094 /* Return a tree of sizetype representing the size, in bytes, of the element
6095 of EXP, an ARRAY_REF. */
6098 array_ref_element_size (tree exp)
6100 tree aligned_size = TREE_OPERAND (exp, 3);
6101 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6103 /* If a size was specified in the ARRAY_REF, it's the size measured
6104 in alignment units of the element type. So multiply by that value. */
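/* For example, if the element type is 12 bytes in size with 4-byte
alignment, the ARRAY_REF records a size of 3 alignment units and we
return 3 * 4 = 12 bytes here. */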
6107 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6108 sizetype from another type of the same width and signedness. */
6109 if (TREE_TYPE (aligned_size) != sizetype)
6110 aligned_size = fold_convert (sizetype, aligned_size);
6111 return size_binop (MULT_EXPR, aligned_size,
6112 size_int (TYPE_ALIGN_UNIT (elmt_type)));
6115 /* Otherwise, take the size from that of the element type. Substitute
6116 any PLACEHOLDER_EXPR that we have. */
6118 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
6121 /* Return a tree representing the lower bound of the array mentioned in
6122 EXP, an ARRAY_REF. */
6125 array_ref_low_bound (tree exp)
6127 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6129 /* If a lower bound is specified in EXP, use it. */
6130 if (TREE_OPERAND (exp, 2))
6131 return TREE_OPERAND (exp, 2);
6133 /* Otherwise, if there is a domain type and it has a lower bound, use it,
6134 substituting for a PLACEHOLDER_EXPR as needed. */
6135 if (domain_type && TYPE_MIN_VALUE (domain_type))
6136 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
6138 /* Otherwise, return a zero of the appropriate type. */
6139 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
6142 /* Return a tree representing the upper bound of the array mentioned in
6143 EXP, an ARRAY_REF. */
6146 array_ref_up_bound (tree exp)
6148 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6150 /* If there is a domain type and it has an upper bound, use it, substituting
6151 for a PLACEHOLDER_EXPR as needed. */
6152 if (domain_type && TYPE_MAX_VALUE (domain_type))
6153 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
6155 /* Otherwise fail. */
6159 /* Return a tree representing the offset, in bytes, of the field referenced
6160 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
6163 component_ref_field_offset (tree exp)
6165 tree aligned_offset = TREE_OPERAND (exp, 2);
6166 tree field = TREE_OPERAND (exp, 1);
6168 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
6169 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that value. */
6173 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6174 sizetype from another type of the same width and signedness. */
6175 if (TREE_TYPE (aligned_offset) != sizetype)
6176 aligned_offset = fold_convert (sizetype, aligned_offset);
6177 return size_binop (MULT_EXPR, aligned_offset,
6178 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
6181 /* Otherwise, take the offset from that of the field. Substitute
6182 any PLACEHOLDER_EXPR that we have. */
6184 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
6187 /* Return 1 if T is an expression that get_inner_reference handles. */
6190 handled_component_p (const_tree t)
6192 switch (TREE_CODE (t))
6197 case ARRAY_RANGE_REF:
6198 case VIEW_CONVERT_EXPR:
6208 /* Given an rtx VALUE that may contain additions and multiplications, return
6209 an equivalent value that just refers to a register, memory, or constant.
6210 This is done by generating instructions to perform the arithmetic and
6211 returning a pseudo-register containing the value.
6213 The returned value may be a REG, SUBREG, MEM or constant. */
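/* For example, passed (plus (reg X) (const_int 4)) this emits an add
instruction and returns a register holding the sum. */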
6216 force_operand (rtx value, rtx target)
6219 /* Use subtarget as the target for operand 0 of a binary operation. */
6220 rtx subtarget = get_subtarget (target);
6221 enum rtx_code code = GET_CODE (value);
6223 /* Check for subreg applied to an expression produced by the loop optimizer. */
6225 && !REG_P (SUBREG_REG (value))
6226 && !MEM_P (SUBREG_REG (value)))
6229 = simplify_gen_subreg (GET_MODE (value),
6230 force_reg (GET_MODE (SUBREG_REG (value)),
6231 force_operand (SUBREG_REG (value),
6233 GET_MODE (SUBREG_REG (value)),
6234 SUBREG_BYTE (value));
6235 code = GET_CODE (value);
6238 /* Check for a PIC address load. */
6239 if ((code == PLUS || code == MINUS)
6240 && XEXP (value, 0) == pic_offset_table_rtx
6241 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
6242 || GET_CODE (XEXP (value, 1)) == LABEL_REF
6243 || GET_CODE (XEXP (value, 1)) == CONST))
6246 subtarget = gen_reg_rtx (GET_MODE (value));
6247 emit_move_insn (subtarget, value);
6251 if (ARITHMETIC_P (value))
6253 op2 = XEXP (value, 1);
6254 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
6256 if (code == MINUS && GET_CODE (op2) == CONST_INT)
6259 op2 = negate_rtx (GET_MODE (value), op2);
6262 /* Check for an addition with OP2 a constant integer and our first
6263 operand a PLUS of a virtual register and something else. In that
6264 case, we want to emit the sum of the virtual register and the
6265 constant first and then add the other value. This allows virtual
6266 register instantiation to simply modify the constant rather than
6267 creating another one around this addition. */
6268 if (code == PLUS && GET_CODE (op2) == CONST_INT
6269 && GET_CODE (XEXP (value, 0)) == PLUS
6270 && REG_P (XEXP (XEXP (value, 0), 0))
6271 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
6272 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
6274 rtx temp = expand_simple_binop (GET_MODE (value), code,
6275 XEXP (XEXP (value, 0), 0), op2,
6276 subtarget, 0, OPTAB_LIB_WIDEN);
6277 return expand_simple_binop (GET_MODE (value), code, temp,
6278 force_operand (XEXP (XEXP (value,
6280 target, 0, OPTAB_LIB_WIDEN);
6283 op1 = force_operand (XEXP (value, 0), subtarget);
6284 op2 = force_operand (op2, NULL_RTX);
6288 return expand_mult (GET_MODE (value), op1, op2, target, 1);
6290 if (!INTEGRAL_MODE_P (GET_MODE (value)))
6291 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6292 target, 1, OPTAB_LIB_WIDEN);
6294 return expand_divmod (0,
6295 FLOAT_MODE_P (GET_MODE (value))
6296 ? RDIV_EXPR : TRUNC_DIV_EXPR,
6297 GET_MODE (value), op1, op2, target, 0);
6299 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6302 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
6305 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6308 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6309 target, 0, OPTAB_LIB_WIDEN);
6311 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6312 target, 1, OPTAB_LIB_WIDEN);
6315 if (UNARY_P (value))
6318 target = gen_reg_rtx (GET_MODE (value));
6319 op1 = force_operand (XEXP (value, 0), NULL_RTX);
6326 case FLOAT_TRUNCATE:
6327 convert_move (target, op1, code == ZERO_EXTEND);
6332 expand_fix (target, op1, code == UNSIGNED_FIX);
6336 case UNSIGNED_FLOAT:
6337 expand_float (target, op1, code == UNSIGNED_FLOAT);
6341 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
6345 #ifdef INSN_SCHEDULING
6346 /* On machines that have insn scheduling, we want all memory references to be
6347 explicit, so we need to deal with such paradoxical SUBREGs. */
6348 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
6349 && (GET_MODE_SIZE (GET_MODE (value))
6350 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
6352 = simplify_gen_subreg (GET_MODE (value),
6353 force_reg (GET_MODE (SUBREG_REG (value)),
6354 force_operand (SUBREG_REG (value),
6356 GET_MODE (SUBREG_REG (value)),
6357 SUBREG_BYTE (value));
6363 /* Subroutine of expand_expr: return nonzero iff there is no way that
6364 EXP can reference X, which is being modified. TOP_P is nonzero if this
6365 call is going to be used to determine whether we need a temporary
6366 for EXP, as opposed to a recursive call to this function.
6368 It is always safe for this routine to return zero since it merely
6369 searches for optimization opportunities. */
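/* Editorial example: if X is the DECL_RTL of a variable V, then
   safe_from_p (X, V, 0) is 0, since evaluating V references X; for an
   unrelated pseudo it would be 1.  Returning 0 is always conservative
   and merely forgoes reusing X as a target.  */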
6372 safe_from_p (const_rtx x, tree exp, int top_p)
6378 /* If EXP has varying size, we MUST use a target since we currently
6379 have no way of allocating temporaries of variable size
6380 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6381 So we assume here that something at a higher level has prevented a
6382 clash. This is somewhat bogus, but the best we can do. Only
6383 do this when X is BLKmode and when we are at the top level. */
6384 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6385 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
6386 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
6387 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
6388 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
6390 && GET_MODE (x) == BLKmode)
6391 /* If X is in the outgoing argument area, it is always safe. */
6393 && (XEXP (x, 0) == virtual_outgoing_args_rtx
6394 || (GET_CODE (XEXP (x, 0)) == PLUS
6395 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
6398 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
6399 find the underlying pseudo. */
6400 if (GET_CODE (x) == SUBREG)
6403 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6407 /* Now look at our tree code and possibly recurse. */
6408 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6410 case tcc_declaration:
6411 exp_rtl = DECL_RTL_IF_SET (exp);
6417 case tcc_exceptional:
6418 if (TREE_CODE (exp) == TREE_LIST)
6422 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
6424 exp = TREE_CHAIN (exp);
6427 if (TREE_CODE (exp) != TREE_LIST)
6428 return safe_from_p (x, exp, 0);
6431 else if (TREE_CODE (exp) == CONSTRUCTOR)
6433 constructor_elt *ce;
6434 unsigned HOST_WIDE_INT idx;
6437 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
6439 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
6440 || !safe_from_p (x, ce->value, 0))
6444 else if (TREE_CODE (exp) == ERROR_MARK)
6445 return 1; /* An already-visited SAVE_EXPR? */
6450 /* The only case we look at here is the DECL_INITIAL inside a
6452 return (TREE_CODE (exp) != DECL_EXPR
6453 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
6454 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
6455 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
6458 case tcc_comparison:
6459 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6464 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6466 case tcc_expression:
6469 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6470 the expression. If it is set, we conflict iff we are that rtx or
6471 both are in memory. Otherwise, we check all operands of the
6472 expression recursively. */
6474 switch (TREE_CODE (exp))
6477 /* If the operand is static or we are static, we can't conflict.
6478 Likewise if we don't conflict with the operand at all. */
6479 if (staticp (TREE_OPERAND (exp, 0))
6480 || TREE_STATIC (exp)
6481 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6484 /* Otherwise, the only way this can conflict is if we are taking
6485 the address of a DECL whose address is part of X, which is
6487 exp = TREE_OPERAND (exp, 0);
6490 if (!DECL_RTL_SET_P (exp)
6491 || !MEM_P (DECL_RTL (exp)))
6494 exp_rtl = XEXP (DECL_RTL (exp), 0);
6498 case MISALIGNED_INDIRECT_REF:
6499 case ALIGN_INDIRECT_REF:
6502 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6503 get_alias_set (exp)))
6508 /* Assume that the call will clobber all hard registers and
6510 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6515 case WITH_CLEANUP_EXPR:
6516 case CLEANUP_POINT_EXPR:
6517 /* Lowered by gimplify.c. */
6521 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6527 /* If we have an rtx, we do not need to scan our operands. */
6531 nops = TREE_OPERAND_LENGTH (exp);
6532 for (i = 0; i < nops; i++)
6533 if (TREE_OPERAND (exp, i) != 0
6534 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6540 /* Should never get a type here. */
6544 /* If we have an rtl, find any enclosed object. Then see if we conflict
6548 if (GET_CODE (exp_rtl) == SUBREG)
6550 exp_rtl = SUBREG_REG (exp_rtl);
6552 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6556 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6557 are memory and they conflict. */
6558 return ! (rtx_equal_p (x, exp_rtl)
6559 || (MEM_P (x) && MEM_P (exp_rtl)
6560 && true_dependence (exp_rtl, VOIDmode, x,
6561 rtx_addr_varies_p)));
6564 /* If we reach here, it is safe. */
6569 /* Return the highest power of two that EXP is known to be a multiple of.
6570 This is used in updating alignment of MEMs in array references. */
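/* Editorial worked examples, inferred from the cases below: for the
   constant 24 (binary 11000) the lowest set bit gives 8; for A + B,
   MIN or MAX the result is the smaller of the operands' factors; for
   A & -16 only the second operand is inspected, giving 16.  */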
6572 unsigned HOST_WIDE_INT
6573 highest_pow2_factor (const_tree exp)
6575 unsigned HOST_WIDE_INT c0, c1;
6577 switch (TREE_CODE (exp))
6580 /* We can find the lowest bit that's a one. If the low
6581 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6582 We need to handle this case since we can find it in a COND_EXPR,
6583 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6584 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6586 if (TREE_OVERFLOW (exp))
6587 return BIGGEST_ALIGNMENT;
6590 /* Note: tree_low_cst is intentionally not used here,
6591 we don't care about the upper bits. */
6592 c0 = TREE_INT_CST_LOW (exp);
6594 return c0 ? c0 : BIGGEST_ALIGNMENT;
6598 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6599 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6600 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6601 return MIN (c0, c1);
6604 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6605 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6608 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6610 if (integer_pow2p (TREE_OPERAND (exp, 1))
6611 && host_integerp (TREE_OPERAND (exp, 1), 1))
6613 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6614 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6615 return MAX (1, c0 / c1);
6620 /* The highest power of two of a bit-and expression is the maximum of
6621 that of its operands. We typically get here for a complex LHS and
6622 a constant negative power of two on the RHS to force an explicit
6623 alignment, so don't bother looking at the LHS. */
6624 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6628 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6631 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6634 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6635 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6636 return MIN (c0, c1);
6645 /* Similar, except that the alignment requirements of TARGET are
6646 taken into account. Assume it is at least as aligned as its
6647 type, unless it is a COMPONENT_REF in which case the layout of
6648 the structure gives the alignment. */
6650 static unsigned HOST_WIDE_INT
6651 highest_pow2_factor_for_target (const_tree target, const_tree exp)
6653 unsigned HOST_WIDE_INT target_align, factor;
6655 factor = highest_pow2_factor (exp);
6656 if (TREE_CODE (target) == COMPONENT_REF)
6657 target_align = DECL_ALIGN_UNIT (TREE_OPERAND (target, 1));
6659 target_align = TYPE_ALIGN_UNIT (TREE_TYPE (target));
6660 return MAX (factor, target_align);
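/* Editorial example: storing to a COMPONENT_REF whose FIELD_DECL has
   DECL_ALIGN_UNIT 8, from an EXP whose highest_pow2_factor is only 4,
   still yields 8 here, because TARGET is assumed to be at least as
   aligned as its field or type.  */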
6663 /* Return the &VAR expression for emulated thread-local VAR. */
6666 emutls_var_address (tree var)
6668 tree emuvar = emutls_decl (var);
6669 tree fn = built_in_decls [BUILT_IN_EMUTLS_GET_ADDRESS];
6670 tree arg = build_fold_addr_expr_with_type (emuvar, ptr_type_node);
6671 tree arglist = build_tree_list (NULL_TREE, arg);
6672 tree call = build_function_call_expr (fn, arglist);
6673 return fold_convert (build_pointer_type (TREE_TYPE (var)), call);
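/* Editorial sketch of the tree built above (the control-variable name
   is an implementation detail of emutls_decl): for "__thread int v;"
   the result is roughly
   (int *) __emutls_get_address (&__emutls_v.v).  */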
6677 /* Subroutine of expand_expr. Expand the two operands of a binary
6678 expression, EXP0 and EXP1, placing the results in OP0 and OP1.
6679 The value may be stored in TARGET if TARGET is nonzero. The
6680 MODIFIER argument is as documented by expand_expr. */
6683 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6684 enum expand_modifier modifier)
6686 if (! safe_from_p (target, exp1, 1))
6688 if (operand_equal_p (exp0, exp1, 0))
6690 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6691 *op1 = copy_rtx (*op0);
6695 /* If we need to preserve evaluation order, copy exp0 into its own
6696 temporary variable so that it can't be clobbered by exp1. */
6697 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6698 exp0 = save_expr (exp0);
6699 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6700 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
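/* Editorial usage note: this is how the binary cases later in this
   file call it, e.g.
   expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
                    subtarget, &op0, &op1, EXPAND_NORMAL);  */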
6705 /* Return a MEM that contains constant EXP. DEFER is as for
6706 output_constant_def and MODIFIER is as for expand_expr. */
6709 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
6713 mem = output_constant_def (exp, defer);
6714 if (modifier != EXPAND_INITIALIZER)
6715 mem = use_anchored_address (mem);
6719 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
6720 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6723 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
6724 enum expand_modifier modifier)
6726 rtx result, subtarget;
6728 HOST_WIDE_INT bitsize, bitpos;
6729 int volatilep, unsignedp;
6730 enum machine_mode mode1;
6732 /* If we are taking the address of a constant and are at the top level,
6733 we have to use output_constant_def since we can't call force_const_mem
6735 /* ??? This should be considered a front-end bug. We should not be
6736 generating ADDR_EXPR of something that isn't an LVALUE. The only
6737 exception here is STRING_CST. */
6738 if (CONSTANT_CLASS_P (exp))
6739 return XEXP (expand_expr_constant (exp, 0, modifier), 0);
6741 /* Everything must be something allowed by is_gimple_addressable. */
6742 switch (TREE_CODE (exp))
6745 /* This case will happen via recursion for &a->b. */
6746 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6749 /* Recurse and make the output_constant_def clause above handle this. */
6750 return expand_expr_addr_expr_1 (DECL_INITIAL (exp), target,
6754 /* The real part of the complex number is always first, therefore
6755 the address is the same as the address of the parent object. */
6758 inner = TREE_OPERAND (exp, 0);
6762 /* The imaginary part of the complex number is always second.
6763 The expression is therefore always offset by the size of the
6766 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
6767 inner = TREE_OPERAND (exp, 0);
6771 /* TLS emulation hook - replace __thread VAR's &VAR with
6772 __emutls_get_address (&_emutls.VAR). */
6773 if (! targetm.have_tls
6774 && TREE_CODE (exp) == VAR_DECL
6775 && DECL_THREAD_LOCAL_P (exp))
6777 exp = emutls_var_address (exp);
6778 return expand_expr (exp, target, tmode, modifier);
6783 /* If the object is a DECL, then expand it for its rtl. Don't bypass
6784 expand_expr, as that can have various side effects; LABEL_DECLs for
6785 example, may not have their DECL_RTL set yet. Expand the rtl of
6786 CONSTRUCTORs too, which should yield a memory reference for the
6787 constructor's contents. Assume language specific tree nodes can
6788 be expanded in some interesting way. */
6790 || TREE_CODE (exp) == CONSTRUCTOR
6791 || TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE)
6793 result = expand_expr (exp, target, tmode,
6794 modifier == EXPAND_INITIALIZER
6795 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
6797 /* If the DECL isn't in memory, then the DECL wasn't properly
6798 marked TREE_ADDRESSABLE, which will be either a front-end
6799 or a tree optimizer bug. */
6800 gcc_assert (MEM_P (result));
6801 result = XEXP (result, 0);
6803 /* ??? Is this needed anymore? */
6804 if (DECL_P (exp) && ! TREE_USED (exp))
6806 assemble_external (exp);
6807 TREE_USED (exp) = 1;
6810 if (modifier != EXPAND_INITIALIZER
6811 && modifier != EXPAND_CONST_ADDRESS)
6812 result = force_operand (result, target);
6816 /* Pass FALSE as the last argument to get_inner_reference although
6817 we are expanding to RTL. The rationale is that we know how to
6818 handle "aligning nodes" here: we can just bypass them because
6819 they won't change the final object whose address will be returned
6820 (they actually exist only for that purpose). */
6821 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6822 &mode1, &unsignedp, &volatilep, false);
6826 /* We must have made progress. */
6827 gcc_assert (inner != exp);
6829 subtarget = offset || bitpos ? NULL_RTX : target;
6830 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier);
6836 if (modifier != EXPAND_NORMAL)
6837 result = force_operand (result, NULL);
6838 tmp = expand_expr (offset, NULL_RTX, tmode,
6839 modifier == EXPAND_INITIALIZER
6840 ? EXPAND_INITIALIZER : EXPAND_NORMAL);
6842 result = convert_memory_address (tmode, result);
6843 tmp = convert_memory_address (tmode, tmp);
6845 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6846 result = gen_rtx_PLUS (tmode, result, tmp);
6849 subtarget = bitpos ? NULL_RTX : target;
6850 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
6851 1, OPTAB_LIB_WIDEN);
6857 /* Someone beforehand should have rejected taking the address
6858 of such an object. */
6859 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
6861 result = plus_constant (result, bitpos / BITS_PER_UNIT);
6862 if (modifier < EXPAND_SUM)
6863 result = force_operand (result, target);
6869 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
6870 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6873 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
6874 enum expand_modifier modifier)
6876 enum machine_mode rmode;
6879 /* Target mode of VOIDmode says "whatever's natural". */
6880 if (tmode == VOIDmode)
6881 tmode = TYPE_MODE (TREE_TYPE (exp));
6883 /* We can get called with some Weird Things if the user does silliness
6884 like "(short) &a". In that case, convert_memory_address won't do
6885 the right thing, so ignore the given target mode. */
6886 if (tmode != Pmode && tmode != ptr_mode)
6889 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
6892 /* Despite expand_expr's claims about ignoring TMODE when not
6893 strictly convenient, things break if we don't honor it. Note
6894 that combined with the above, we only do this for pointer modes. */
6895 rmode = GET_MODE (result);
6896 if (rmode == VOIDmode)
6899 result = convert_memory_address (tmode, result);
6904 /* Generate code for computing CONSTRUCTOR EXP.
6905 An rtx for the computed value is returned. If AVOID_TEMP_MEM
6906 is TRUE, NULL is returned instead of creating a temporary variable
6907 in memory, and the caller needs to handle it differently. */
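/* Editorial example: a CONSTRUCTOR that is all constant zeros, such as
   the initializer in "struct s z = { 0 };", expanded into an existing
   BLKmode target, takes the clear_storage fast path below rather than
   building the value in a temporary first.  */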
6910 expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
6911 bool avoid_temp_mem)
6913 tree type = TREE_TYPE (exp);
6914 enum machine_mode mode = TYPE_MODE (type);
6916 /* Try to avoid creating a temporary at all. This is possible
6917 if all of the initializer is zero.
6918 FIXME: try to handle all [0..255] initializers we can handle
6920 if (TREE_STATIC (exp)
6921 && !TREE_ADDRESSABLE (exp)
6922 && target != 0 && mode == BLKmode
6923 && all_zeros_p (exp))
6925 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
6929 /* All elts simple constants => refer to a constant in memory. But
6930 if this is a non-BLKmode mode, let it store a field at a time
6931 since that should make a CONST_INT or CONST_DOUBLE when we
6932 fold. Likewise, if we have a target we can use, it is best to
6933 store directly into the target unless the type is large enough
6934 that memcpy will be used. If we are making an initializer and
6935 all operands are constant, put it in memory as well.
6937 FIXME: Avoid trying to fill vector constructors piecemeal.
6938 Output them with output_constant_def below unless we're sure
6939 they're zeros. This should go away when vector initializers
6940 are treated like VECTOR_CST instead of arrays. */
6941 if ((TREE_STATIC (exp)
6942 && ((mode == BLKmode
6943 && ! (target != 0 && safe_from_p (target, exp, 1)))
6944 || TREE_ADDRESSABLE (exp)
6945 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6946 && (! MOVE_BY_PIECES_P
6947 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6949 && ! mostly_zeros_p (exp))))
6950 || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
6951 && TREE_CONSTANT (exp)))
6958 constructor = expand_expr_constant (exp, 1, modifier);
6960 if (modifier != EXPAND_CONST_ADDRESS
6961 && modifier != EXPAND_INITIALIZER
6962 && modifier != EXPAND_SUM)
6963 constructor = validize_mem (constructor);
6968 /* Handle calls that pass values in multiple non-contiguous
6969 locations. The Irix 6 ABI has examples of this. */
6970 if (target == 0 || ! safe_from_p (target, exp, 1)
6971 || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
6977 = assign_temp (build_qualified_type (type, (TYPE_QUALS (type)
6978 | (TREE_READONLY (exp)
6979 * TYPE_QUAL_CONST))),
6980 0, TREE_ADDRESSABLE (exp), 1);
6983 store_constructor (exp, target, 0, int_expr_size (exp));
6988 /* expand_expr: generate code for computing expression EXP.
6989 An rtx for the computed value is returned. The value is never null.
6990 In the case of a void EXP, const0_rtx is returned.
6992 The value may be stored in TARGET if TARGET is nonzero.
6993 TARGET is just a suggestion; callers must assume that
6994 the rtx returned may not be the same as TARGET.
6996 If TARGET is CONST0_RTX, it means that the value will be ignored.
6998 If TMODE is not VOIDmode, it suggests generating the
6999 result in mode TMODE. But this is done only when convenient.
7000 Otherwise, TMODE is ignored and the value is generated in its natural mode.
7001 TMODE is just a suggestion; callers must assume that
7002 the rtx returned may not have mode TMODE.
7004 Note that TARGET may have neither TMODE nor MODE. In that case, it
7005 probably will not be used.
7007 If MODIFIER is EXPAND_SUM then when EXP is an addition
7008 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
7009 or a nest of (PLUS ...) and (MINUS ...) where the terms are
7010 products as above, or REG or MEM, or constant.
7011 Ordinarily in such cases we would output mul or add instructions
7012 and then return a pseudo reg containing the sum.
7014 EXPAND_INITIALIZER is much like EXPAND_SUM except that
7015 it also marks a label as absolutely required (it can't be dead).
7016 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
7017 This is used for outputting expressions used in initializers.
7019 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
7020 with a constant address even if that address is not normally legitimate.
7021 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
7023 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
7024 a call parameter. Such targets require special care as we haven't yet
7025 marked TARGET so that it's safe from being trashed by libcalls. We
7026 don't want to use TARGET for anything but the final result;
7027 intermediate values must go elsewhere. Additionally, calls to
7028 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
7030 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
7031 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
7032 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
7033 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
7036 static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
7037 enum expand_modifier, rtx *);
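/* Editorial usage sketch of EXPAND_SUM; EXP and MODE stand for a
   caller's tree and mode and are not defined here.  The caller may get
   back an unfinished address such as (plus (reg) (const_int 4)) and
   must force it before using it as a hard operand.  */
#if 0
  rtx addr = expand_expr (exp, NULL_RTX, Pmode, EXPAND_SUM);
  if (! memory_address_p (mode, addr))
    addr = force_operand (addr, NULL_RTX);
#endif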
7040 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
7041 enum expand_modifier modifier, rtx *alt_rtl)
7044 rtx ret, last = NULL;
7046 /* Handle ERROR_MARK before anybody tries to access its type. */
7047 if (TREE_CODE (exp) == ERROR_MARK
7048 || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
7050 ret = CONST0_RTX (tmode);
7051 return ret ? ret : const0_rtx;
7054 if (flag_non_call_exceptions)
7056 rn = lookup_expr_eh_region (exp);
7058 /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw. */
7060 last = get_last_insn ();
7063 /* If this is an expression of some kind and it has an associated line
7064 number, then emit the line number before expanding the expression.
7066 We need to save and restore the file and line information so that
7067 errors discovered during expansion are emitted with the right
7068 information. It would be better if the diagnostic routines
7069 used the file/line information embedded in the tree nodes rather
7071 if (cfun && EXPR_HAS_LOCATION (exp))
7073 location_t saved_location = input_location;
7074 input_location = EXPR_LOCATION (exp);
7075 set_curr_insn_source_location (input_location);
7077 /* Record where the insns produced belong. */
7078 set_curr_insn_block (TREE_BLOCK (exp));
7080 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
7082 input_location = saved_location;
7086 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
7089 /* If using non-call exceptions, mark all insns that may trap.
7090 expand_call() will mark CALL_INSNs before we get to this code,
7091 but it doesn't handle libcalls, and these may trap. */
7095 for (insn = next_real_insn (last); insn;
7096 insn = next_real_insn (insn))
7098 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
7099 /* If we want exceptions for non-call insns, any
7100 may_trap_p instruction may throw. */
7101 && GET_CODE (PATTERN (insn)) != CLOBBER
7102 && GET_CODE (PATTERN (insn)) != USE
7103 && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
7104 add_reg_note (insn, REG_EH_REGION, GEN_INT (rn));
7112 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
7113 enum expand_modifier modifier, rtx *alt_rtl)
7115 rtx op0, op1, op2, temp, decl_rtl;
7118 enum machine_mode mode;
7119 enum tree_code code = TREE_CODE (exp);
7121 rtx subtarget, original_target;
7123 tree context, subexp0, subexp1;
7124 bool reduce_bit_field;
7125 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field \
7126 ? reduce_to_bit_field_precision ((expr), \
7131 type = TREE_TYPE (exp);
7132 mode = TYPE_MODE (type);
7133 unsignedp = TYPE_UNSIGNED (type);
7135 ignore = (target == const0_rtx
7136 || ((code == NOP_EXPR || code == CONVERT_EXPR
7137 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
7138 && TREE_CODE (type) == VOID_TYPE));
7140 /* An operation in what may be a bit-field type needs the
7141 result to be reduced to the precision of the bit-field type,
7142 which is narrower than that of the type's mode. */
7143 reduce_bit_field = (!ignore
7144 && TREE_CODE (type) == INTEGER_TYPE
7145 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
7147 /* If we are going to ignore this result, we need only do something
7148 if there is a side-effect somewhere in the expression. If there
7149 is, short-circuit the most common cases here. Note that we must
7150 not call expand_expr with anything but const0_rtx in case this
7151 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
7155 if (! TREE_SIDE_EFFECTS (exp))
7158 /* Ensure we reference a volatile object even if value is ignored, but
7159 don't do this if all we are doing is taking its address. */
7160 if (TREE_THIS_VOLATILE (exp)
7161 && TREE_CODE (exp) != FUNCTION_DECL
7162 && mode != VOIDmode && mode != BLKmode
7163 && modifier != EXPAND_CONST_ADDRESS)
7165 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
7167 temp = copy_to_reg (temp);
7171 if (TREE_CODE_CLASS (code) == tcc_unary
7172 || code == COMPONENT_REF || code == INDIRECT_REF)
7173 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
7176 else if (TREE_CODE_CLASS (code) == tcc_binary
7177 || TREE_CODE_CLASS (code) == tcc_comparison
7178 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
7180 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
7181 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
7184 else if (code == BIT_FIELD_REF)
7186 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
7187 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
7188 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
7195 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
7198 /* Use subtarget as the target for operand 0 of a binary operation. */
7199 subtarget = get_subtarget (target);
7200 original_target = target;
7206 tree function = decl_function_context (exp);
7208 temp = label_rtx (exp);
7209 temp = gen_rtx_LABEL_REF (Pmode, temp);
7211 if (function != current_function_decl
7213 LABEL_REF_NONLOCAL_P (temp) = 1;
7215 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
7220 return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
7225 /* If a static var's type was incomplete when the decl was written,
7226 but the type is complete now, lay out the decl now. */
7227 if (DECL_SIZE (exp) == 0
7228 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
7229 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
7230 layout_decl (exp, 0);
7232 /* TLS emulation hook - replace __thread vars with
7233 *__emutls_get_address (&_emutls.var). */
7234 if (! targetm.have_tls
7235 && TREE_CODE (exp) == VAR_DECL
7236 && DECL_THREAD_LOCAL_P (exp))
7238 exp = build_fold_indirect_ref (emutls_var_address (exp));
7239 return expand_expr_real_1 (exp, target, tmode, modifier, NULL);
7242 /* ... fall through ... */
7246 decl_rtl = DECL_RTL (exp);
7247 gcc_assert (decl_rtl);
7248 decl_rtl = copy_rtx (decl_rtl);
7250 /* Ensure the variable is marked as used even if it doesn't go through
7251 a parser. If it hasn't been used yet, write out an external
7253 if (! TREE_USED (exp))
7255 assemble_external (exp);
7256 TREE_USED (exp) = 1;
7259 /* Show we haven't gotten RTL for this yet. */
7262 /* Variables inherited from containing functions should have
7263 been lowered by this point. */
7264 context = decl_function_context (exp);
7265 gcc_assert (!context
7266 || context == current_function_decl
7267 || TREE_STATIC (exp)
7268 /* ??? C++ creates functions that are not TREE_STATIC. */
7269 || TREE_CODE (exp) == FUNCTION_DECL);
7271 /* This is the case of an array whose size is to be determined
7272 from its initializer, while the initializer is still being parsed.
7275 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
7276 temp = validize_mem (decl_rtl);
7278 /* If DECL_RTL is memory, we are in the normal case; if the
7279 address is not valid, get the address into a register. */
7281 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
7284 *alt_rtl = decl_rtl;
7285 decl_rtl = use_anchored_address (decl_rtl);
7286 if (modifier != EXPAND_CONST_ADDRESS
7287 && modifier != EXPAND_SUM
7288 && !memory_address_p (DECL_MODE (exp), XEXP (decl_rtl, 0)))
7289 temp = replace_equiv_address (decl_rtl,
7290 copy_rtx (XEXP (decl_rtl, 0)));
7293 /* If we got something, return it. But first, set the alignment
7294 if the address is a register. */
7297 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
7298 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
7303 /* If the mode of DECL_RTL does not match that of the decl, it
7304 must be a promoted value. We return a SUBREG of the wanted mode,
7305 but mark it so that we know that it was already extended. */
7307 if (REG_P (decl_rtl)
7308 && GET_MODE (decl_rtl) != DECL_MODE (exp))
7310 enum machine_mode pmode;
7312 /* Get the signedness used for this variable. Ensure we get the
7313 same mode we got when the variable was declared. */
7314 pmode = promote_mode (type, DECL_MODE (exp), &unsignedp,
7315 (TREE_CODE (exp) == RESULT_DECL
7316 || TREE_CODE (exp) == PARM_DECL) ? 1 : 0);
7317 gcc_assert (GET_MODE (decl_rtl) == pmode);
7319 temp = gen_lowpart_SUBREG (mode, decl_rtl);
7320 SUBREG_PROMOTED_VAR_P (temp) = 1;
7321 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
7328 temp = immed_double_const (TREE_INT_CST_LOW (exp),
7329 TREE_INT_CST_HIGH (exp), mode);
7335 tree tmp = NULL_TREE;
7336 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
7337 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
7338 || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
7339 || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
7340 || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
7341 || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
7342 return const_vector_from_tree (exp);
7343 if (GET_MODE_CLASS (mode) == MODE_INT)
7345 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
7347 tmp = fold_unary (VIEW_CONVERT_EXPR, type_for_mode, exp);
7350 tmp = build_constructor_from_list (type,
7351 TREE_VECTOR_CST_ELTS (exp));
7352 return expand_expr (tmp, ignore ? const0_rtx : target,
7357 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
7360 /* If optimized, generate immediate CONST_DOUBLE
7361 which will be turned into memory by reload if necessary.
7363 We used to force a register so that loop.c could see it. But
7364 this does not allow gen_* patterns to perform optimizations with
7365 the constants. It also produces two insns in cases like "x = 1.0;".
7366 On most machines, floating-point constants are not permitted in
7367 many insns, so we'd end up copying it to a register in any case.
7369 Now, we do the copying in expand_binop, if appropriate. */
7370 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
7371 TYPE_MODE (TREE_TYPE (exp)));
7374 return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
7375 TYPE_MODE (TREE_TYPE (exp)));
7378 /* Handle evaluating a complex constant in a CONCAT target. */
7379 if (original_target && GET_CODE (original_target) == CONCAT)
7381 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7384 rtarg = XEXP (original_target, 0);
7385 itarg = XEXP (original_target, 1);
7387 /* Move the real and imaginary parts separately. */
7388 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
7389 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
7392 emit_move_insn (rtarg, op0);
7394 emit_move_insn (itarg, op1);
7396 return original_target;
7399 /* ... fall through ... */
7402 temp = expand_expr_constant (exp, 1, modifier);
7404 /* temp contains a constant address.
7405 On RISC machines where a constant address isn't valid,
7406 make some insns to get that address into a register. */
7407 if (modifier != EXPAND_CONST_ADDRESS
7408 && modifier != EXPAND_INITIALIZER
7409 && modifier != EXPAND_SUM
7410 && ! memory_address_p (mode, XEXP (temp, 0)))
7411 return replace_equiv_address (temp,
7412 copy_rtx (XEXP (temp, 0)));
7417 tree val = TREE_OPERAND (exp, 0);
7418 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
7420 if (!SAVE_EXPR_RESOLVED_P (exp))
7422 /* We can indeed still hit this case, typically via builtin
7423 expanders calling save_expr immediately before expanding
7424 something. Assume this means that we only have to deal
7425 with non-BLKmode values. */
7426 gcc_assert (GET_MODE (ret) != BLKmode);
7428 val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
7429 DECL_ARTIFICIAL (val) = 1;
7430 DECL_IGNORED_P (val) = 1;
7431 TREE_OPERAND (exp, 0) = val;
7432 SAVE_EXPR_RESOLVED_P (exp) = 1;
7434 if (!CONSTANT_P (ret))
7435 ret = copy_to_reg (ret);
7436 SET_DECL_RTL (val, ret);
7443 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
7444 expand_goto (TREE_OPERAND (exp, 0));
7446 expand_computed_goto (TREE_OPERAND (exp, 0));
7450 /* If we don't need the result, just ensure we evaluate any
7454 unsigned HOST_WIDE_INT idx;
7457 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
7458 expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
7463 return expand_constructor (exp, target, modifier, false);
7465 case MISALIGNED_INDIRECT_REF:
7466 case ALIGN_INDIRECT_REF:
7469 tree exp1 = TREE_OPERAND (exp, 0);
7471 if (modifier != EXPAND_WRITE)
7475 t = fold_read_from_constant_string (exp);
7477 return expand_expr (t, target, tmode, modifier);
7480 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
7481 op0 = memory_address (mode, op0);
7483 if (code == ALIGN_INDIRECT_REF)
7485 int align = TYPE_ALIGN_UNIT (type);
7486 op0 = gen_rtx_AND (Pmode, op0, GEN_INT (-align));
7487 op0 = memory_address (mode, op0);
7490 temp = gen_rtx_MEM (mode, op0);
7492 set_mem_attributes (temp, exp, 0);
7494 /* Resolve the misalignment now, so that we don't have to remember
7495 to resolve it later. Of course, this only works for reads. */
7496 /* ??? When we get around to supporting writes, we'll have to handle
7497 this in store_expr directly. The vectorizer isn't generating
7498 those yet, however. */
7499 if (code == MISALIGNED_INDIRECT_REF)
7504 gcc_assert (modifier == EXPAND_NORMAL
7505 || modifier == EXPAND_STACK_PARM);
7507 /* The vectorizer should have already checked the mode. */
7508 icode = optab_handler (movmisalign_optab, mode)->insn_code;
7509 gcc_assert (icode != CODE_FOR_nothing);
7511 /* We've already validated the memory, and we're creating a
7512 new pseudo destination. The predicates really can't fail. */
7513 reg = gen_reg_rtx (mode);
7515 /* Nor can the insn generator. */
7516 insn = GEN_FCN (icode) (reg, temp);
7525 case TARGET_MEM_REF:
7527 struct mem_address addr;
7529 get_address_description (exp, &addr);
7530 op0 = addr_for_mem_ref (&addr, true);
7531 op0 = memory_address (mode, op0);
7532 temp = gen_rtx_MEM (mode, op0);
7533 set_mem_attributes (temp, TMR_ORIGINAL (exp), 0);
7540 tree array = TREE_OPERAND (exp, 0);
7541 tree index = TREE_OPERAND (exp, 1);
7543 /* Fold an expression like: "foo"[2].
7544 This is not done in fold so it won't happen inside &.
7545 Don't fold if this is for wide characters since it's too
7546 difficult to do correctly and this is a very rare case. */
7548 if (modifier != EXPAND_CONST_ADDRESS
7549 && modifier != EXPAND_INITIALIZER
7550 && modifier != EXPAND_MEMORY)
7552 tree t = fold_read_from_constant_string (exp);
7555 return expand_expr (t, target, tmode, modifier);
7558 /* If this is a constant index into a constant array,
7559 just get the value from the array. Handle both the cases when
7560 we have an explicit constructor and when our operand is a variable
7561 that was declared const. */
7563 if (modifier != EXPAND_CONST_ADDRESS
7564 && modifier != EXPAND_INITIALIZER
7565 && modifier != EXPAND_MEMORY
7566 && TREE_CODE (array) == CONSTRUCTOR
7567 && ! TREE_SIDE_EFFECTS (array)
7568 && TREE_CODE (index) == INTEGER_CST)
7570 unsigned HOST_WIDE_INT ix;
7573 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
7575 if (tree_int_cst_equal (field, index))
7577 if (!TREE_SIDE_EFFECTS (value))
7578 return expand_expr (fold (value), target, tmode, modifier);
7583 else if (optimize >= 1
7584 && modifier != EXPAND_CONST_ADDRESS
7585 && modifier != EXPAND_INITIALIZER
7586 && modifier != EXPAND_MEMORY
7587 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7588 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7589 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
7590 && targetm.binds_local_p (array))
7592 if (TREE_CODE (index) == INTEGER_CST)
7594 tree init = DECL_INITIAL (array);
7596 if (TREE_CODE (init) == CONSTRUCTOR)
7598 unsigned HOST_WIDE_INT ix;
7601 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
7603 if (tree_int_cst_equal (field, index))
7605 if (TREE_SIDE_EFFECTS (value))
7608 if (TREE_CODE (value) == CONSTRUCTOR)
7610 /* If VALUE is a CONSTRUCTOR, this
7611 optimization is only useful if
7612 this doesn't store the CONSTRUCTOR
7613 into memory. If it does, it is more
7614 efficient to just load the data from
7615 the array directly. */
7616 rtx ret = expand_constructor (value, target,
7618 if (ret == NULL_RTX)
7622 return expand_expr (fold (value), target, tmode,
7626 else if (TREE_CODE (init) == STRING_CST)
7628 tree index1 = index;
7629 tree low_bound = array_ref_low_bound (exp);
7630 index1 = fold_convert (sizetype, TREE_OPERAND (exp, 1));
7632 /* Optimize the special-case of a zero lower bound.
7634 We convert the low_bound to sizetype to avoid some problems
7635 with constant folding. (E.g. suppose the lower bound is 1,
7636 and its mode is QI. Without the conversion, (ARRAY
7637 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
7638 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
7640 if (! integer_zerop (low_bound))
7641 index1 = size_diffop (index1, fold_convert (sizetype,
7644 if (0 > compare_tree_int (index1,
7645 TREE_STRING_LENGTH (init)))
7647 tree type = TREE_TYPE (TREE_TYPE (init));
7648 enum machine_mode mode = TYPE_MODE (type);
7650 if (GET_MODE_CLASS (mode) == MODE_INT
7651 && GET_MODE_SIZE (mode) == 1)
7652 return gen_int_mode (TREE_STRING_POINTER (init)
7653 [TREE_INT_CST_LOW (index1)],
7660 goto normal_inner_ref;
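/* Editorial example for the STRING_CST path above: given
   "static const char s[] = "abc";", reading s[1] at -O1 or above folds
   directly to gen_int_mode ('b', mode) (typically QImode here), with
   no memory reference emitted.  */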
7663 /* If the operand is a CONSTRUCTOR, we can just extract the
7664 appropriate field if it is present. */
7665 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
7667 unsigned HOST_WIDE_INT idx;
7670 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7672 if (field == TREE_OPERAND (exp, 1)
7673 /* We can normally use the value of the field in the
7674 CONSTRUCTOR. However, if this is a bitfield in
7675 an integral mode that we can fit in a HOST_WIDE_INT,
7676 we must mask only the number of bits in the bitfield,
7677 since this is done implicitly by the constructor. If
7678 the bitfield does not meet either of those conditions,
7679 we can't do this optimization. */
7680 && (! DECL_BIT_FIELD (field)
7681 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
7682 && (GET_MODE_BITSIZE (DECL_MODE (field))
7683 <= HOST_BITS_PER_WIDE_INT))))
7685 if (DECL_BIT_FIELD (field)
7686 && modifier == EXPAND_STACK_PARM)
7688 op0 = expand_expr (value, target, tmode, modifier);
7689 if (DECL_BIT_FIELD (field))
7691 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
7692 enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
7694 if (TYPE_UNSIGNED (TREE_TYPE (field)))
7696 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7697 op0 = expand_and (imode, op0, op1, target);
7702 = build_int_cst (NULL_TREE,
7703 GET_MODE_BITSIZE (imode) - bitsize);
7705 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7707 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7715 goto normal_inner_ref;
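/* Editorial worked example for the bitfield masking above: an unsigned
   3-bit field taken from its CONSTRUCTOR value is masked with
   ((HOST_WIDE_INT) 1 << 3) - 1, i.e. 7, while a signed field is
   shifted left and then right arithmetically so the sign bit is
   replicated.  */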
7718 case ARRAY_RANGE_REF:
7721 enum machine_mode mode1;
7722 HOST_WIDE_INT bitsize, bitpos;
7725 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7726 &mode1, &unsignedp, &volatilep, true);
7729 /* If we got back the original object, something is wrong. Perhaps
7730 we are evaluating an expression too early. In any event, don't
7731 infinitely recurse. */
7732 gcc_assert (tem != exp);
7734 /* If TEM's type is a union of variable size, pass TARGET to the inner
7735 computation, since it will need a temporary and TARGET is known
7736 to be adequate. This occurs in unchecked conversion in Ada. */
7740 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7741 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7743 && modifier != EXPAND_STACK_PARM
7744 ? target : NULL_RTX),
7746 (modifier == EXPAND_INITIALIZER
7747 || modifier == EXPAND_CONST_ADDRESS
7748 || modifier == EXPAND_STACK_PARM)
7749 ? modifier : EXPAND_NORMAL);
7751 /* If this is a constant, put it into a register if it is a legitimate
7752 constant, OFFSET is 0, and we won't try to extract outside the
7753 register (in case we were passed a partially uninitialized object
7754 or a view_conversion to a larger size) or a BLKmode piece of it
7755 (e.g. if it is unchecked-converted to a record type in Ada). Force
7756 the constant to memory otherwise. */
7757 if (CONSTANT_P (op0))
7759 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7760 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7763 && bitpos + bitsize <= GET_MODE_BITSIZE (mode))
7764 op0 = force_reg (mode, op0);
7766 op0 = validize_mem (force_const_mem (mode, op0));
7769 /* Otherwise, if this object is not in memory and we either have an
7770 offset, a BLKmode result, or a reference outside the object, put it
7771 there. Such cases can occur in Ada if we have unchecked conversion
7772 of an expression from a scalar type to an array or record type or
7773 for an ARRAY_RANGE_REF whose type is BLKmode. */
7774 else if (!MEM_P (op0)
7777 || (bitpos + bitsize
7778 > GET_MODE_BITSIZE (GET_MODE (op0)))))
7780 tree nt = build_qualified_type (TREE_TYPE (tem),
7781 (TYPE_QUALS (TREE_TYPE (tem))
7782 | TYPE_QUAL_CONST));
7783 rtx memloc = assign_temp (nt, 1, 1, 1);
7785 emit_move_insn (memloc, op0);
7791 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7794 gcc_assert (MEM_P (op0));
7796 #ifdef POINTERS_EXTEND_UNSIGNED
7797 if (GET_MODE (offset_rtx) != Pmode)
7798 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7800 if (GET_MODE (offset_rtx) != ptr_mode)
7801 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7804 if (GET_MODE (op0) == BLKmode
7805 /* A constant address in OP0 can have VOIDmode; we must
7806 not try to call force_reg in that case. */
7807 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7809 && (bitpos % bitsize) == 0
7810 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7811 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7813 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7817 op0 = offset_address (op0, offset_rtx,
7818 highest_pow2_factor (offset));
7821 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7822 record its alignment as BIGGEST_ALIGNMENT. */
7823 if (MEM_P (op0) && bitpos == 0 && offset != 0
7824 && is_aligning_offset (offset, tem))
7825 set_mem_align (op0, BIGGEST_ALIGNMENT);
7827 /* Don't forget about volatility even if this is a bitfield. */
7828 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
7830 if (op0 == orig_op0)
7831 op0 = copy_rtx (op0);
7833 MEM_VOLATILE_P (op0) = 1;
7836 /* The following code doesn't handle CONCAT.
7837 Assume only bitpos == 0 can be used for CONCAT, due to
7838 one-element arrays having the same mode as their element. */
7839 if (GET_CODE (op0) == CONCAT)
7841 gcc_assert (bitpos == 0
7842 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)));
7846 /* In cases where an aligned union has an unaligned object
7847 as a field, we might be extracting a BLKmode value from
7848 an integer-mode (e.g., SImode) object. Handle this case
7849 by doing the extract into an object as wide as the field
7850 (which we know to be the width of a basic mode), then
7851 storing into memory, and changing the mode to BLKmode. */
7852 if (mode1 == VOIDmode
7853 || REG_P (op0) || GET_CODE (op0) == SUBREG
7854 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7855 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7856 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7857 && modifier != EXPAND_CONST_ADDRESS
7858 && modifier != EXPAND_INITIALIZER)
7859 /* If the field isn't aligned enough to fetch as a memref,
7860 fetch it as a bit field. */
7861 || (mode1 != BLKmode
7862 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7863 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
7865 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7866 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
7867 && ((modifier == EXPAND_CONST_ADDRESS
7868 || modifier == EXPAND_INITIALIZER)
7870 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7871 || (bitpos % BITS_PER_UNIT != 0)))
7872 /* If the type and the field are a constant size and the
7873 size of the type isn't the same size as the bitfield,
7874 we must use bitfield operations. */
7876 && TYPE_SIZE (TREE_TYPE (exp))
7877 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
7878 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7881 enum machine_mode ext_mode = mode;
7883 if (ext_mode == BLKmode
7884 && ! (target != 0 && MEM_P (op0)
7886 && bitpos % BITS_PER_UNIT == 0))
7887 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7889 if (ext_mode == BLKmode)
7892 target = assign_temp (type, 0, 1, 1);
7897 /* In this case, BITPOS must start at a byte boundary and
7898 TARGET, if specified, must be a MEM. */
7899 gcc_assert (MEM_P (op0)
7900 && (!target || MEM_P (target))
7901 && !(bitpos % BITS_PER_UNIT));
7903 emit_block_move (target,
7904 adjust_address (op0, VOIDmode,
7905 bitpos / BITS_PER_UNIT),
7906 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7908 (modifier == EXPAND_STACK_PARM
7909 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7914 op0 = validize_mem (op0);
7916 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
7917 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7919 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7920 (modifier == EXPAND_STACK_PARM
7921 ? NULL_RTX : target),
7922 ext_mode, ext_mode);
7924 /* If the result is a record type and BITSIZE is narrower than
7925 the mode of OP0, an integral mode, and this is a big endian
7926 machine, we must put the field into the high-order bits. */
7927 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7928 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7929 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7930 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7931 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7935 /* If the result type is BLKmode, store the data into a temporary
7936 of the appropriate type, but with the mode corresponding to the
7937 mode for the data we have (op0's mode). It's tempting to make
7938 this a constant type, since we know it's only being stored once,
7939 but that can cause problems if we are taking the address of this
7940 COMPONENT_REF because the MEM of any reference via that address
7941 will have flags corresponding to the type, which will not
7942 necessarily be constant. */
7943 if (mode == BLKmode)
7945 HOST_WIDE_INT size = GET_MODE_BITSIZE (ext_mode);
7948 /* If the reference doesn't use the alias set of its type,
7949 we cannot create the temporary using that type. */
7950 if (component_uses_parent_alias_set (exp))
7952 new = assign_stack_local (ext_mode, size, 0);
7953 set_mem_alias_set (new, get_alias_set (exp));
7956 new = assign_stack_temp_for_type (ext_mode, size, 0, type);
7958 emit_move_insn (new, op0);
7959 op0 = copy_rtx (new);
7960 PUT_MODE (op0, BLKmode);
7961 set_mem_attributes (op0, exp, 1);
7967 /* If the result is BLKmode, use that to access the object
7969 if (mode == BLKmode)
7972 /* Get a reference to just this component. */
7973 if (modifier == EXPAND_CONST_ADDRESS
7974 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7975 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7977 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7979 if (op0 == orig_op0)
7980 op0 = copy_rtx (op0);
7982 set_mem_attributes (op0, exp, 0);
7983 if (REG_P (XEXP (op0, 0)))
7984 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7986 MEM_VOLATILE_P (op0) |= volatilep;
7987 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7988 || modifier == EXPAND_CONST_ADDRESS
7989 || modifier == EXPAND_INITIALIZER)
7991 else if (target == 0)
7992 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7994 convert_move (target, op0, unsignedp);
7999 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
8002 /* All valid uses of __builtin_va_arg_pack () are removed during
8004 if (CALL_EXPR_VA_ARG_PACK (exp))
8005 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
8007 tree fndecl = get_callee_fndecl (exp), attr;
8010 && (attr = lookup_attribute ("error",
8011 DECL_ATTRIBUTES (fndecl))) != NULL)
8012 error ("%Kcall to %qs declared with attribute error: %s",
8013 exp, lang_hooks.decl_printable_name (fndecl, 1),
8014 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
8016 && (attr = lookup_attribute ("warning",
8017 DECL_ATTRIBUTES (fndecl))) != NULL)
8018 warning (0, "%Kcall to %qs declared with attribute warning: %s",
8019 exp, lang_hooks.decl_printable_name (fndecl, 1),
8020 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
8022 /* Check for a built-in function. */
8023 if (fndecl && DECL_BUILT_IN (fndecl))
8025 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_FRONTEND)
8026 return lang_hooks.expand_expr (exp, original_target,
8027 tmode, modifier, alt_rtl);
8029 return expand_builtin (exp, target, subtarget, tmode, ignore);
8032 return expand_call (exp, target, ignore);
8036 if (TREE_OPERAND (exp, 0) == error_mark_node)
8039 if (TREE_CODE (type) == UNION_TYPE)
8041 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
8043 /* If both input and output are BLKmode, this conversion isn't doing
8044 anything except possibly changing memory attributes. */
8045 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
8047 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
8050 result = copy_rtx (result);
8051 set_mem_attributes (result, exp, 0);
8057 if (TYPE_MODE (type) != BLKmode)
8058 target = gen_reg_rtx (TYPE_MODE (type));
8060 target = assign_temp (type, 0, 1, 1);
8064 /* Store data into beginning of memory target. */
8065 store_expr (TREE_OPERAND (exp, 0),
8066 adjust_address (target, TYPE_MODE (valtype), 0),
8067 modifier == EXPAND_STACK_PARM,
8072 gcc_assert (REG_P (target));
8074 /* Store this field into a union of the proper type. */
8075 store_field (target,
8076 MIN ((int_size_in_bytes (TREE_TYPE
8077 (TREE_OPERAND (exp, 0)))
8079 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
8080 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
8084 /* Return the entire union. */
8088 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8090 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
8093 /* If the signedness of the conversion differs and OP0 is
8094 a promoted SUBREG, clear that indication since we now
8095 have to do the proper extension. */
8096 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
8097 && GET_CODE (op0) == SUBREG)
8098 SUBREG_PROMOTED_VAR_P (op0) = 0;
8100 return REDUCE_BIT_FIELD (op0);
8103 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode,
8104 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
8105 if (GET_MODE (op0) == mode)
8108 /* If OP0 is a constant, just convert it into the proper mode. */
8109 else if (CONSTANT_P (op0))
8111 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8112 enum machine_mode inner_mode = TYPE_MODE (inner_type);
8114 if (modifier == EXPAND_INITIALIZER)
8115 op0 = simplify_gen_subreg (mode, op0, inner_mode,
8116 subreg_lowpart_offset (mode,
8119 op0 = convert_modes (mode, inner_mode, op0,
8120 TYPE_UNSIGNED (inner_type));
8123 else if (modifier == EXPAND_INITIALIZER)
8124 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
8126 else if (target == 0)
8127 op0 = convert_to_mode (mode, op0,
8128 TYPE_UNSIGNED (TREE_TYPE
8129 (TREE_OPERAND (exp, 0))));
8132 convert_move (target, op0,
8133 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8137 return REDUCE_BIT_FIELD (op0);
8139 case VIEW_CONVERT_EXPR:
8140 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
8142 /* If the input and output modes are both the same, we are done. */
8143 if (TYPE_MODE (type) == GET_MODE (op0))
8145 /* If neither mode is BLKmode and both modes are the same size,
8146 then we can use gen_lowpart. */
8147 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
8148 && GET_MODE_SIZE (TYPE_MODE (type))
8149 == GET_MODE_SIZE (GET_MODE (op0)))
8151 if (GET_CODE (op0) == SUBREG)
8152 op0 = force_reg (GET_MODE (op0), op0);
8153 op0 = gen_lowpart (TYPE_MODE (type), op0);
8155 /* If both modes are integral, then we can convert from one to the
8157 else if (SCALAR_INT_MODE_P (GET_MODE (op0))
8158 && SCALAR_INT_MODE_P (TYPE_MODE (type)))
8159 op0 = convert_modes (TYPE_MODE (type), GET_MODE (op0), op0,
8160 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8161 /* As a last resort, spill op0 to memory, and reload it in a
8163 else if (!MEM_P (op0))
8165 /* If the operand is not a MEM, force it into memory. Since we
8166 are going to be changing the mode of the MEM, don't call
8167 force_const_mem for constants because we don't allow pool
8168 constants to change mode. */
8169 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8171 gcc_assert (!TREE_ADDRESSABLE (exp));
8173 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
8175 = assign_stack_temp_for_type
8176 (TYPE_MODE (inner_type),
8177 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
8179 emit_move_insn (target, op0);
8183 /* At this point, OP0 is in the correct mode. If the output type is such
8184 that the operand is known to be aligned, indicate that it is.
8185 Otherwise, we need only be concerned about alignment for non-BLKmode
8189 op0 = copy_rtx (op0);
8191 if (TYPE_ALIGN_OK (type))
8192 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
8193 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
8194 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
8196 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8197 HOST_WIDE_INT temp_size
8198 = MAX (int_size_in_bytes (inner_type),
8199 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
8200 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
8201 temp_size, 0, type);
8202 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
8204 gcc_assert (!TREE_ADDRESSABLE (exp));
8206 if (GET_MODE (op0) == BLKmode)
8207 emit_block_move (new_with_op0_mode, op0,
8208 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
8209 (modifier == EXPAND_STACK_PARM
8210 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
8212 emit_move_insn (new_with_op0_mode, op0);
8217 op0 = adjust_address (op0, TYPE_MODE (type), 0);
8222 case POINTER_PLUS_EXPR:
8223 /* Even though the sizetype mode and the pointer's mode can be different,
8224 expand is able to handle this correctly and get the correct result out
8225 of the PLUS_EXPR code. */
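/* Editor's illustration (not from the original source): at the C level
   this arises for ordinary pointer arithmetic such as

     int *q = p + n;

   which is represented as q = p POINTER_PLUS (n * 4), where the offset
   operand has sizetype's mode even if that differs from the mode of p.  */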
8228 /* Check if this is a case for multiplication and addition. */
8229 if ((TREE_CODE (type) == INTEGER_TYPE
8230 || TREE_CODE (type) == FIXED_POINT_TYPE)
8231 && TREE_CODE (TREE_OPERAND (exp, 0)) == MULT_EXPR)
8233 tree subsubexp0, subsubexp1;
8234 enum tree_code code0, code1, this_code;
8236 subexp0 = TREE_OPERAND (exp, 0);
8237 subsubexp0 = TREE_OPERAND (subexp0, 0);
8238 subsubexp1 = TREE_OPERAND (subexp0, 1);
8239 code0 = TREE_CODE (subsubexp0);
8240 code1 = TREE_CODE (subsubexp1);
8241 this_code = TREE_CODE (type) == INTEGER_TYPE ? NOP_EXPR
8242 : FIXED_CONVERT_EXPR;
8243 if (code0 == this_code && code1 == this_code
8244 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8245 < TYPE_PRECISION (TREE_TYPE (subsubexp0)))
8246 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8247 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp1, 0))))
8248 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8249 == TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp1, 0)))))
8251 tree op0type = TREE_TYPE (TREE_OPERAND (subsubexp0, 0));
8252 enum machine_mode innermode = TYPE_MODE (op0type);
8253 bool zextend_p = TYPE_UNSIGNED (op0type);
8254 bool sat_p = TYPE_SATURATING (TREE_TYPE (subsubexp0));
8256 this_optab = zextend_p ? umadd_widen_optab : smadd_widen_optab;
8258 this_optab = zextend_p ? usmadd_widen_optab
8259 : ssmadd_widen_optab;
8260 if (mode == GET_MODE_2XWIDER_MODE (innermode)
8261 && (optab_handler (this_optab, mode)->insn_code
8262 != CODE_FOR_nothing))
8264 expand_operands (TREE_OPERAND (subsubexp0, 0),
8265 TREE_OPERAND (subsubexp1, 0),
8266 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8267 op2 = expand_expr (TREE_OPERAND (exp, 1), subtarget,
8268 VOIDmode, EXPAND_NORMAL);
8269 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8272 return REDUCE_BIT_FIELD (temp);
8277 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
8278 something else, make sure we add the register to the constant and
8279 then to the other thing. This case can occur during strength
8280 reduction and doing it this way will produce better code if the
8281 frame pointer or argument pointer is eliminated.
8283 fold-const.c will ensure that the constant is always in the inner
8284 PLUS_EXPR, so the only case we need to do anything about is if
8285 sp, ap, or fp is our second argument, in which case we must swap
8286 the innermost first argument and our second argument. */
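/* Editor's sketch of the rewrite performed below: given a tree of the
   form (EXP0 + CST) + FP, where FP's DECL_RTL is the frame pointer, the
   operands are swapped to yield (FP + CST) + EXP0, so that the register
   and the constant are combined first and survive frame-pointer
   elimination as a single offset.  */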
8288 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
8289 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
8290 && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
8291 && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
8292 || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
8293 || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
8295 tree t = TREE_OPERAND (exp, 1);
8297 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
8298 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
8301 /* If the result is to be ptr_mode and we are adding an integer to
8302 something, we might be forming a constant. So try to use
8303 plus_constant. If it produces a sum and we can't accept it,
8304 use force_operand. This allows P = &ARR[const] to generate
8305 efficient code on machines where a SYMBOL_REF is not a valid address.
8308 If this is an EXPAND_SUM call, always return the sum. */
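/* Editor's example of the intent: for

     char *p = &arr[10];

   the desired RTL address is (plus (symbol_ref "arr") (const_int 10)),
   which plus_constant builds directly; force_operand is only needed when
   the target cannot accept such a sum as an operand.  */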
8309 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8310 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8312 if (modifier == EXPAND_STACK_PARM)
8314 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
8315 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8316 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
8320 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
8322 /* Use immed_double_const to ensure that the constant is
8323 truncated according to the mode of OP1, then sign extended
8324 to a HOST_WIDE_INT. Using the constant directly can result
8325 in non-canonical RTL in a 64x32 cross compile. */
8327 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
8329 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
8330 op1 = plus_constant (op1, INTVAL (constant_part));
8331 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8332 op1 = force_operand (op1, target);
8333 return REDUCE_BIT_FIELD (op1);
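/* Editor's note on the immed_double_const call above, with a worked
   example: on a 64-bit host targeting a 32-bit machine, the SImode
   constant 0x80000000 must become the sign-extended CONST_INT
   -0x80000000; using the low word directly would yield the
   non-canonical zero-extended form.  */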
8336 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8337 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8338 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
8342 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8343 (modifier == EXPAND_INITIALIZER
8344 ? EXPAND_INITIALIZER : EXPAND_SUM));
8345 if (! CONSTANT_P (op0))
8347 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8348 VOIDmode, modifier);
8349 /* Return a PLUS if modifier says it's OK. */
8350 if (modifier == EXPAND_SUM
8351 || modifier == EXPAND_INITIALIZER)
8352 return simplify_gen_binary (PLUS, mode, op0, op1);
8355 /* Use immed_double_const to ensure that the constant is
8356 truncated according to the mode of OP1, then sign extended
8357 to a HOST_WIDE_INT. Using the constant directly can result
8358 in non-canonical RTL in a 64x32 cross compile. */
8360 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
8362 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
8363 op0 = plus_constant (op0, INTVAL (constant_part));
8364 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8365 op0 = force_operand (op0, target);
8366 return REDUCE_BIT_FIELD (op0);
8370 /* No sense saving up arithmetic to be done
8371 if it's all in the wrong mode to form part of an address.
8372 And force_operand won't know whether to sign-extend or zero-extend. */
8374 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8375 || mode != ptr_mode)
8377 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8378 subtarget, &op0, &op1, 0);
8379 if (op0 == const0_rtx)
8381 if (op1 == const0_rtx)
8386 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8387 subtarget, &op0, &op1, modifier);
8388 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8391 /* Check if this is a case for multiplication and subtraction. */
8392 if ((TREE_CODE (type) == INTEGER_TYPE
8393 || TREE_CODE (type) == FIXED_POINT_TYPE)
8394 && TREE_CODE (TREE_OPERAND (exp, 1)) == MULT_EXPR)
8396 tree subsubexp0, subsubexp1;
8397 enum tree_code code0, code1, this_code;
8399 subexp1 = TREE_OPERAND (exp, 1);
8400 subsubexp0 = TREE_OPERAND (subexp1, 0);
8401 subsubexp1 = TREE_OPERAND (subexp1, 1);
8402 code0 = TREE_CODE (subsubexp0);
8403 code1 = TREE_CODE (subsubexp1);
8404 this_code = TREE_CODE (type) == INTEGER_TYPE ? NOP_EXPR
8405 : FIXED_CONVERT_EXPR;
8406 if (code0 == this_code && code1 == this_code
8407 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8408 < TYPE_PRECISION (TREE_TYPE (subsubexp0)))
8409 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8410 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp1, 0))))
8411 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8412 == TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp1, 0)))))
8414 tree op0type = TREE_TYPE (TREE_OPERAND (subsubexp0, 0));
8415 enum machine_mode innermode = TYPE_MODE (op0type);
8416 bool zextend_p = TYPE_UNSIGNED (op0type);
8417 bool sat_p = TYPE_SATURATING (TREE_TYPE (subsubexp0));
8419 this_optab = zextend_p ? umsub_widen_optab : smsub_widen_optab;
8421 this_optab = zextend_p ? usmsub_widen_optab
8422 : ssmsub_widen_optab;
8423 if (mode == GET_MODE_2XWIDER_MODE (innermode)
8424 && (optab_handler (this_optab, mode)->insn_code
8425 != CODE_FOR_nothing))
8427 expand_operands (TREE_OPERAND (subsubexp0, 0),
8428 TREE_OPERAND (subsubexp1, 0),
8429 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8430 op2 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8431 VOIDmode, EXPAND_NORMAL);
8432 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8435 return REDUCE_BIT_FIELD (temp);
8440 /* For initializers, we are allowed to return a MINUS of two
8441 symbolic constants. Here we handle all cases when both operands are constant. */
8443 /* Handle difference of two symbolic constants,
8444 for the sake of an initializer. */
8445 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8446 && really_constant_p (TREE_OPERAND (exp, 0))
8447 && really_constant_p (TREE_OPERAND (exp, 1)))
8449 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8450 NULL_RTX, &op0, &op1, modifier);
8452 /* If the last operand is a CONST_INT, use plus_constant of
8453 the negated constant. Else make the MINUS. */
8454 if (GET_CODE (op1) == CONST_INT)
8455 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
8457 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
8460 /* No sense saving up arithmetic to be done
8461 if it's all in the wrong mode to form part of an address.
8462 And force_operand won't know whether to sign-extend or zero-extend. */
8464 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8465 || mode != ptr_mode)
8468 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8469 subtarget, &op0, &op1, modifier);
8471 /* Convert A - const to A + (-const). */
8472 if (GET_CODE (op1) == CONST_INT)
8474 op1 = negate_rtx (mode, op1);
8475 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8481 /* If this is a fixed-point operation, then we cannot use the code
8482 below because "expand_mult" doesn't support sat/no-sat fixed-point multiplications. */
8484 if (ALL_FIXED_POINT_MODE_P (mode))
8487 /* If first operand is constant, swap them.
8488 Thus the following special case checks need only
8489 check the second operand. */
8490 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
8492 tree t1 = TREE_OPERAND (exp, 0);
8493 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
8494 TREE_OPERAND (exp, 1) = t1;
8497 /* Attempt to return something suitable for generating an
8498 indexed address, for machines that support that. */
8500 if (modifier == EXPAND_SUM && mode == ptr_mode
8501 && host_integerp (TREE_OPERAND (exp, 1), 0))
8503 tree exp1 = TREE_OPERAND (exp, 1);
8505 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8509 op0 = force_operand (op0, NULL_RTX);
8511 op0 = copy_to_mode_reg (mode, op0);
8513 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
8514 gen_int_mode (tree_low_cst (exp1, 0),
8515 TYPE_MODE (TREE_TYPE (exp1)))));
8518 if (modifier == EXPAND_STACK_PARM)
8521 /* Check for multiplying things that have been extended
8522 from a narrower type. If this machine supports multiplying
8523 in that narrower type with a result in the desired type,
8524 do it that way, and avoid the explicit type-conversion. */
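/* Editor's illustration: the source pattern recognized here is

     int a, b;
     long long r = (long long) a * (long long) b;

   which a target with a widening multiply pattern (e.g. mulsidi3) can
   perform in a single insn on the SImode operands instead of extending
   both to DImode first.  */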
8526 subexp0 = TREE_OPERAND (exp, 0);
8527 subexp1 = TREE_OPERAND (exp, 1);
8528 /* First, check if we have a multiplication of one signed and one
8529 unsigned operand. */
8530 if (TREE_CODE (subexp0) == NOP_EXPR
8531 && TREE_CODE (subexp1) == NOP_EXPR
8532 && TREE_CODE (type) == INTEGER_TYPE
8533 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8534 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8535 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8536 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp1, 0))))
8537 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8538 != TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp1, 0)))))
8540 enum machine_mode innermode
8541 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (subexp0, 0)));
8542 this_optab = usmul_widen_optab;
8543 if (mode == GET_MODE_WIDER_MODE (innermode))
8545 if (optab_handler (this_optab, mode)->insn_code != CODE_FOR_nothing)
8547 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0))))
8548 expand_operands (TREE_OPERAND (subexp0, 0),
8549 TREE_OPERAND (subexp1, 0),
8550 NULL_RTX, &op0, &op1, 0);
8552 expand_operands (TREE_OPERAND (subexp0, 0),
8553 TREE_OPERAND (subexp1, 0),
8554 NULL_RTX, &op1, &op0, 0);
8560 /* Check for a multiplication with matching signedness. */
8561 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
8562 && TREE_CODE (type) == INTEGER_TYPE
8563 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8564 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8565 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8566 && int_fits_type_p (TREE_OPERAND (exp, 1),
8567 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8568 /* Don't use a widening multiply if a shift will do. */
8569 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8570 > HOST_BITS_PER_WIDE_INT)
8571 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
8573 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8574 && (TYPE_PRECISION (TREE_TYPE
8575 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8576 == TYPE_PRECISION (TREE_TYPE
8578 (TREE_OPERAND (exp, 0), 0))))
8579 /* If both operands are extended, they must either both
8580 be zero-extended or both be sign-extended. */
8581 && (TYPE_UNSIGNED (TREE_TYPE
8582 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8583 == TYPE_UNSIGNED (TREE_TYPE
8585 (TREE_OPERAND (exp, 0), 0)))))))
8587 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
8588 enum machine_mode innermode = TYPE_MODE (op0type);
8589 bool zextend_p = TYPE_UNSIGNED (op0type);
8590 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8591 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8593 if (mode == GET_MODE_2XWIDER_MODE (innermode))
8595 if (optab_handler (this_optab, mode)->insn_code != CODE_FOR_nothing)
8597 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8598 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8599 TREE_OPERAND (exp, 1),
8600 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8602 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8603 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8604 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8607 else if (optab_handler (other_optab, mode)->insn_code != CODE_FOR_nothing
8608 && innermode == word_mode)
8611 op0 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
8612 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8613 op1 = convert_modes (innermode, mode,
8614 expand_normal (TREE_OPERAND (exp, 1)),
8617 op1 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 1), 0));
8618 temp = expand_binop (mode, other_optab, op0, op1, target,
8619 unsignedp, OPTAB_LIB_WIDEN);
8620 hipart = gen_highpart (innermode, temp);
8621 htem = expand_mult_highpart_adjust (innermode, hipart,
8625 emit_move_insn (hipart, htem);
8626 return REDUCE_BIT_FIELD (temp);
8630 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8631 subtarget, &op0, &op1, 0);
8632 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8634 case TRUNC_DIV_EXPR:
8635 case FLOOR_DIV_EXPR:
8637 case ROUND_DIV_EXPR:
8638 case EXACT_DIV_EXPR:
8639 /* If this is a fixed-point operation, then we cannot use the code
8640 below because "expand_divmod" doesn't support sat/no-sat fixed-point divisions. */
8642 if (ALL_FIXED_POINT_MODE_P (mode))
8645 if (modifier == EXPAND_STACK_PARM)
8647 /* Possible optimization: compute the dividend with EXPAND_SUM;
8648 then, if the divisor is constant, we can optimize the case
8649 where some terms of the dividend have coefficients divisible by it. */
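/* E.g. (editor's sketch): for (x * 8 + y) / 4, expanding the dividend
   with EXPAND_SUM would keep the x * 8 term visible, whose coefficient
   is divisible by the constant divisor 4.  */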
8650 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8651 subtarget, &op0, &op1, 0);
8652 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8657 case TRUNC_MOD_EXPR:
8658 case FLOOR_MOD_EXPR:
8660 case ROUND_MOD_EXPR:
8661 if (modifier == EXPAND_STACK_PARM)
8663 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8664 subtarget, &op0, &op1, 0);
8665 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8667 case FIXED_CONVERT_EXPR:
8668 op0 = expand_normal (TREE_OPERAND (exp, 0));
8669 if (target == 0 || modifier == EXPAND_STACK_PARM)
8670 target = gen_reg_rtx (mode);
8672 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == INTEGER_TYPE
8673 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
8674 || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
8675 expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
8677 expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
8680 case FIX_TRUNC_EXPR:
8681 op0 = expand_normal (TREE_OPERAND (exp, 0));
8682 if (target == 0 || modifier == EXPAND_STACK_PARM)
8683 target = gen_reg_rtx (mode);
8684 expand_fix (target, op0, unsignedp);
8688 op0 = expand_normal (TREE_OPERAND (exp, 0));
8689 if (target == 0 || modifier == EXPAND_STACK_PARM)
8690 target = gen_reg_rtx (mode);
8691 /* expand_float can't figure out what to do if FROM has VOIDmode.
8692 So give it the correct mode. With -O, cse will optimize this. */
8693 if (GET_MODE (op0) == VOIDmode)
8694 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8696 expand_float (target, op0,
8697 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8701 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8702 VOIDmode, EXPAND_NORMAL);
8703 if (modifier == EXPAND_STACK_PARM)
8705 temp = expand_unop (mode,
8706 optab_for_tree_code (NEGATE_EXPR, type,
8710 return REDUCE_BIT_FIELD (temp);
8713 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8714 VOIDmode, EXPAND_NORMAL);
8715 if (modifier == EXPAND_STACK_PARM)
8718 /* ABS_EXPR is not valid for complex arguments. */
8719 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8720 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8722 /* Unsigned abs is simply the operand. Testing here means we don't
8723 risk generating incorrect code below. */
8724 if (TYPE_UNSIGNED (type))
8727 return expand_abs (mode, op0, target, unsignedp,
8728 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8732 target = original_target;
8734 || modifier == EXPAND_STACK_PARM
8735 || (MEM_P (target) && MEM_VOLATILE_P (target))
8736 || GET_MODE (target) != mode
8738 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8739 target = gen_reg_rtx (mode);
8740 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8741 target, &op0, &op1, 0);
8743 /* First try to do it with a special MIN or MAX instruction.
8744 If that does not win, use a conditional jump to select the proper value. */
8746 this_optab = optab_for_tree_code (code, type, optab_default);
8747 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8752 /* At this point, a MEM target is no longer useful; we will get better code without it. */
8755 if (! REG_P (target))
8756 target = gen_reg_rtx (mode);
8758 /* If op1 was placed in target, swap op0 and op1. */
8759 if (target != op0 && target == op1)
8766 /* We generate better code and avoid problems with op1 mentioning
8767 target by forcing op1 into a pseudo if it isn't a constant. */
8768 if (! CONSTANT_P (op1))
8769 op1 = force_reg (mode, op1);
8772 enum rtx_code comparison_code;
8775 if (code == MAX_EXPR)
8776 comparison_code = unsignedp ? GEU : GE;
8778 comparison_code = unsignedp ? LEU : LE;
8780 /* Canonicalize to comparisons against 0. */
8781 if (op1 == const1_rtx)
8783 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8784 or (a != 0 ? a : 1) for unsigned.
8785 For MIN we are safe converting (a <= 1 ? a : 1)
8786 into (a <= 0 ? a : 1) */
8787 cmpop1 = const0_rtx;
8788 if (code == MAX_EXPR)
8789 comparison_code = unsignedp ? NE : GT;
8791 if (op1 == constm1_rtx && !unsignedp)
8793 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8794 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8795 cmpop1 = const0_rtx;
8796 if (code == MIN_EXPR)
8797 comparison_code = LT;
8799 #ifdef HAVE_conditional_move
8800 /* Use a conditional move if possible. */
8801 if (can_conditionally_move_p (mode))
8805 /* ??? Same problem as in expmed.c: emit_conditional_move
8806 forces a stack adjustment via compare_from_rtx, and we
8807 lose the stack adjustment if the sequence we are about
8808 to create is discarded. */
8809 do_pending_stack_adjust ();
8813 /* Try to emit the conditional move. */
8814 insn = emit_conditional_move (target, comparison_code,
8819 /* If we could do the conditional move, emit the sequence, and return. */
8823 rtx seq = get_insns ();
8829 /* Otherwise discard the sequence and fall back to code with a branch. */
8835 emit_move_insn (target, op0);
8837 temp = gen_label_rtx ();
8838 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8839 unsignedp, mode, NULL_RTX, NULL_RTX, temp);
8841 emit_move_insn (target, op1);
8846 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8847 VOIDmode, EXPAND_NORMAL);
8848 if (modifier == EXPAND_STACK_PARM)
8850 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8854 /* ??? Can optimize bitwise operations with one arg constant.
8855 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8856 and (a bitwise1 b) bitwise2 b (etc)
8857 but that is probably not worthwhile. */
8859 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8860 boolean values when we want in all cases to compute both of them. In
8861 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8862 as actual zero-or-1 values and then bitwise anding. In cases where
8863 there cannot be any side effects, better code would be made by
8864 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8865 how to recognize those cases. */
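/* Editor's sketch: for "r = f () && g ()" expressed as TRUTH_AND_EXPR,
   the code below computes roughly

     t1 = (f () != 0);  t2 = (g () != 0);  r = t1 & t2;

   evaluating both operands unconditionally, whereas TRUTH_ANDIF_EXPR
   would branch around the evaluation of g ().  */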
8867 case TRUTH_AND_EXPR:
8868 code = BIT_AND_EXPR;
8873 code = BIT_IOR_EXPR;
8877 case TRUTH_XOR_EXPR:
8878 code = BIT_XOR_EXPR;
8884 gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
8885 || (GET_MODE_PRECISION (TYPE_MODE (type))
8886 == TYPE_PRECISION (type)));
8891 /* If this is a fixed-point operation, then we cannot use the code
8892 below because "expand_shift" doesn't support sat/no-sat fixed-point shifts. */
8894 if (ALL_FIXED_POINT_MODE_P (mode))
8897 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8899 if (modifier == EXPAND_STACK_PARM)
8901 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8902 VOIDmode, EXPAND_NORMAL);
8903 temp = expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8905 if (code == LSHIFT_EXPR)
8906 temp = REDUCE_BIT_FIELD (temp);
8909 /* Could determine the answer when only additive constants differ. Also,
8910 the addition of one can be handled by changing the condition. */
8917 case UNORDERED_EXPR:
8925 temp = do_store_flag (exp,
8926 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8927 tmode != VOIDmode ? tmode : mode, 0);
8931 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8932 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8934 && REG_P (original_target)
8935 && (GET_MODE (original_target)
8936 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8938 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8939 VOIDmode, EXPAND_NORMAL);
8941 /* If temp is constant, we can just compute the result. */
8942 if (GET_CODE (temp) == CONST_INT)
8944 if (INTVAL (temp) != 0)
8945 emit_move_insn (target, const1_rtx);
8947 emit_move_insn (target, const0_rtx);
8952 if (temp != original_target)
8954 enum machine_mode mode1 = GET_MODE (temp);
8955 if (mode1 == VOIDmode)
8956 mode1 = tmode != VOIDmode ? tmode : mode;
8958 temp = copy_to_mode_reg (mode1, temp);
8961 op1 = gen_label_rtx ();
8962 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8963 GET_MODE (temp), unsignedp, op1);
8964 emit_move_insn (temp, const1_rtx);
8969 /* If no set-flag instruction, must generate a conditional store
8970 into a temporary variable. Drop through and handle this like && and ||. */
8975 || modifier == EXPAND_STACK_PARM
8976 || ! safe_from_p (target, exp, 1)
8977 /* Make sure we don't have a hard reg (such as function's return
8978 value) live across basic blocks, if not optimizing. */
8979 || (!optimize && REG_P (target)
8980 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8981 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8984 emit_move_insn (target, const0_rtx);
8986 op1 = gen_label_rtx ();
8987 jumpifnot (exp, op1);
8990 emit_move_insn (target, const1_rtx);
8993 return ignore ? const0_rtx : target;
8995 case TRUTH_NOT_EXPR:
8996 if (modifier == EXPAND_STACK_PARM)
8998 op0 = expand_expr (TREE_OPERAND (exp, 0), target,
8999 VOIDmode, EXPAND_NORMAL);
9000 /* The parser is careful to generate TRUTH_NOT_EXPR
9001 only with operands that are always zero or one. */
9002 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
9003 target, 1, OPTAB_LIB_WIDEN);
9007 case STATEMENT_LIST:
9009 tree_stmt_iterator iter;
9011 gcc_assert (ignore);
9013 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
9014 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
9019 /* A COND_EXPR with its type being VOID_TYPE represents a
9020 conditional jump and is handled in
9021 expand_gimple_cond_expr. */
9022 gcc_assert (!VOID_TYPE_P (TREE_TYPE (exp)));
9024 /* Note that COND_EXPRs whose type is a structure or union
9025 are required to be constructed to contain assignments to
9026 a temporary variable, so that we can evaluate them here
9027 for side effect only. If type is void, we must do likewise. */
9029 gcc_assert (!TREE_ADDRESSABLE (type)
9031 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node
9032 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node);
9034 /* If we are not to produce a result, we have no target. Otherwise,
9035 if a target was specified use it; it will not be used as an
9036 intermediate target unless it is safe. If no target, use a temporary. */
9039 if (modifier != EXPAND_STACK_PARM
9041 && safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
9042 && GET_MODE (original_target) == mode
9043 #ifdef HAVE_conditional_move
9044 && (! can_conditionally_move_p (mode)
9045 || REG_P (original_target))
9047 && !MEM_P (original_target))
9048 temp = original_target;
9050 temp = assign_temp (type, 0, 0, 1);
9052 do_pending_stack_adjust ();
9054 op0 = gen_label_rtx ();
9055 op1 = gen_label_rtx ();
9056 jumpifnot (TREE_OPERAND (exp, 0), op0);
9057 store_expr (TREE_OPERAND (exp, 1), temp,
9058 modifier == EXPAND_STACK_PARM,
9061 emit_jump_insn (gen_jump (op1));
9064 store_expr (TREE_OPERAND (exp, 2), temp,
9065 modifier == EXPAND_STACK_PARM,
9073 target = expand_vec_cond_expr (exp, target);
9078 tree lhs = TREE_OPERAND (exp, 0);
9079 tree rhs = TREE_OPERAND (exp, 1);
9080 gcc_assert (ignore);
9082 /* Check for |= or &= of a bitfield of size one into another bitfield
9083 of size 1. In this case, (unless we need the result of the
9084 assignment) we can do this more efficiently with a
9085 test followed by an assignment, if necessary.
9087 ??? At this point, we can't get a BIT_FIELD_REF here. But if
9088 things change so we do, this code should be enhanced to support it. */
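/* Editor's illustration of the optimization below: for one-bit fields,

     s.a |= s.b;

   is emitted as "if (s.b) s.a = 1;", avoiding a read-modify-write of
   the word containing s.a when s.b is clear.  */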
9090 if (TREE_CODE (lhs) == COMPONENT_REF
9091 && (TREE_CODE (rhs) == BIT_IOR_EXPR
9092 || TREE_CODE (rhs) == BIT_AND_EXPR)
9093 && TREE_OPERAND (rhs, 0) == lhs
9094 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
9095 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
9096 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
9098 rtx label = gen_label_rtx ();
9099 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
9100 do_jump (TREE_OPERAND (rhs, 1),
9103 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
9104 MOVE_NONTEMPORAL (exp));
9105 do_pending_stack_adjust ();
9110 expand_assignment (lhs, rhs, MOVE_NONTEMPORAL (exp));
9115 if (!TREE_OPERAND (exp, 0))
9116 expand_null_return ();
9118 expand_return (TREE_OPERAND (exp, 0));
9122 return expand_expr_addr_expr (exp, target, tmode, modifier);
9125 /* Get the rtx code of the operands. */
9126 op0 = expand_normal (TREE_OPERAND (exp, 0));
9127 op1 = expand_normal (TREE_OPERAND (exp, 1));
9130 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
9132 /* Move the real (op0) and imaginary (op1) parts to their location. */
9133 write_complex_part (target, op0, false);
9134 write_complex_part (target, op1, true);
9139 op0 = expand_normal (TREE_OPERAND (exp, 0));
9140 return read_complex_part (op0, false);
9143 op0 = expand_normal (TREE_OPERAND (exp, 0));
9144 return read_complex_part (op0, true);
9147 expand_resx_expr (exp);
9150 case TRY_CATCH_EXPR:
9152 case EH_FILTER_EXPR:
9153 case TRY_FINALLY_EXPR:
9154 /* Lowered by tree-eh.c. */
9157 case WITH_CLEANUP_EXPR:
9158 case CLEANUP_POINT_EXPR:
9160 case CASE_LABEL_EXPR:
9166 case PREINCREMENT_EXPR:
9167 case PREDECREMENT_EXPR:
9168 case POSTINCREMENT_EXPR:
9169 case POSTDECREMENT_EXPR:
9172 case TRUTH_ANDIF_EXPR:
9173 case TRUTH_ORIF_EXPR:
9174 /* Lowered by gimplify.c. */
9177 case CHANGE_DYNAMIC_TYPE_EXPR:
9178 /* This is ignored at the RTL level. The tree level set
9179 DECL_POINTER_ALIAS_SET of any variable to be 0, which is
9180 overkill for the RTL layer but is all that we can do. */
9185 return get_exception_pointer ();
9188 return get_exception_filter ();
9191 /* Function descriptors are not valid except as
9192 initialization constants, and should not be expanded. */
9200 expand_label (TREE_OPERAND (exp, 0));
9204 expand_asm_expr (exp);
9207 case WITH_SIZE_EXPR:
9208 /* WITH_SIZE_EXPR expands to its first argument. The caller should
9209 have pulled out the size to use in whatever context it needed. */
9210 return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode,
9213 case REALIGN_LOAD_EXPR:
9215 tree oprnd0 = TREE_OPERAND (exp, 0);
9216 tree oprnd1 = TREE_OPERAND (exp, 1);
9217 tree oprnd2 = TREE_OPERAND (exp, 2);
9220 this_optab = optab_for_tree_code (code, type, optab_default);
9221 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9222 op2 = expand_normal (oprnd2);
9223 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
9231 tree oprnd0 = TREE_OPERAND (exp, 0);
9232 tree oprnd1 = TREE_OPERAND (exp, 1);
9233 tree oprnd2 = TREE_OPERAND (exp, 2);
9236 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9237 op2 = expand_normal (oprnd2);
9238 target = expand_widen_pattern_expr (exp, op0, op1, op2,
9243 case WIDEN_SUM_EXPR:
9245 tree oprnd0 = TREE_OPERAND (exp, 0);
9246 tree oprnd1 = TREE_OPERAND (exp, 1);
9248 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
9249 target = expand_widen_pattern_expr (exp, op0, NULL_RTX, op1,
9254 case REDUC_MAX_EXPR:
9255 case REDUC_MIN_EXPR:
9256 case REDUC_PLUS_EXPR:
9258 op0 = expand_normal (TREE_OPERAND (exp, 0));
9259 this_optab = optab_for_tree_code (code, type, optab_default);
9260 temp = expand_unop (mode, this_optab, op0, target, unsignedp);
9265 case VEC_EXTRACT_EVEN_EXPR:
9266 case VEC_EXTRACT_ODD_EXPR:
9268 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9269 NULL_RTX, &op0, &op1, 0);
9270 this_optab = optab_for_tree_code (code, type, optab_default);
9271 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
9277 case VEC_INTERLEAVE_HIGH_EXPR:
9278 case VEC_INTERLEAVE_LOW_EXPR:
9280 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9281 NULL_RTX, &op0, &op1, 0);
9282 this_optab = optab_for_tree_code (code, type, optab_default);
9283 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
9289 case VEC_LSHIFT_EXPR:
9290 case VEC_RSHIFT_EXPR:
9292 target = expand_vec_shift_expr (exp, target);
9296 case VEC_UNPACK_HI_EXPR:
9297 case VEC_UNPACK_LO_EXPR:
9299 op0 = expand_normal (TREE_OPERAND (exp, 0));
9300 this_optab = optab_for_tree_code (code, type, optab_default);
9301 temp = expand_widen_pattern_expr (exp, op0, NULL_RTX, NULL_RTX,
9307 case VEC_UNPACK_FLOAT_HI_EXPR:
9308 case VEC_UNPACK_FLOAT_LO_EXPR:
9310 op0 = expand_normal (TREE_OPERAND (exp, 0));
9311 /* The signedness is determined from the input operand. */
9312 this_optab = optab_for_tree_code (code,
9313 TREE_TYPE (TREE_OPERAND (exp, 0)),
9315 temp = expand_widen_pattern_expr
9316 (exp, op0, NULL_RTX, NULL_RTX,
9317 target, TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
9323 case VEC_WIDEN_MULT_HI_EXPR:
9324 case VEC_WIDEN_MULT_LO_EXPR:
9326 tree oprnd0 = TREE_OPERAND (exp, 0);
9327 tree oprnd1 = TREE_OPERAND (exp, 1);
9329 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
9330 target = expand_widen_pattern_expr (exp, op0, op1, NULL_RTX,
9332 gcc_assert (target);
9336 case VEC_PACK_TRUNC_EXPR:
9337 case VEC_PACK_SAT_EXPR:
9338 case VEC_PACK_FIX_TRUNC_EXPR:
9339 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9343 return lang_hooks.expand_expr (exp, original_target, tmode,
9347 /* Here to do an ordinary binary operator. */
9349 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9350 subtarget, &op0, &op1, 0);
9352 this_optab = optab_for_tree_code (code, type, optab_default);
9354 if (modifier == EXPAND_STACK_PARM)
9356 temp = expand_binop (mode, this_optab, op0, op1, target,
9357 unsignedp, OPTAB_LIB_WIDEN);
9359 return REDUCE_BIT_FIELD (temp);
9361 #undef REDUCE_BIT_FIELD
9363 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
9364 signedness of TYPE), possibly returning the result in TARGET. */
9366 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
9368 HOST_WIDE_INT prec = TYPE_PRECISION (type);
9369 if (target && GET_MODE (target) != GET_MODE (exp))
9371 /* For constant values, reduce using build_int_cst_type. */
9372 if (GET_CODE (exp) == CONST_INT)
9374 HOST_WIDE_INT value = INTVAL (exp);
9375 tree t = build_int_cst_type (type, value);
9376 return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
9378 else if (TYPE_UNSIGNED (type))
9381 if (prec < HOST_BITS_PER_WIDE_INT)
9382 mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
9385 mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
9386 ((unsigned HOST_WIDE_INT) 1
9387 << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
9389 return expand_and (GET_MODE (exp), exp, mask, target);
9393 tree count = build_int_cst (NULL_TREE,
9394 GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
9395 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
9396 return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
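/* Editor's worked example for the function above: reducing a 32-bit
   value to a 5-bit unsigned field masks with (1 << 5) - 1 = 0x1f; for a
   signed field it shifts left and then arithmetic-right by
   32 - 5 = 27 bits, which replicates the sign bit through the upper
   bits.  */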
9400 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9401 when applied to the address of EXP produces an address known to be
9402 aligned more than BIGGEST_ALIGNMENT. */
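/* Editor's illustration: the shape recognized below is the usual
   dynamic-alignment idiom, e.g.

     exp[((- (intptr_t) &exp) & (ALIGN - 1)) + i]

   where ALIGN - 1 is the BIT_AND_EXPR constant and the negated address
   of EXP appears under the NEGATE_EXPR.  */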
9405 is_aligning_offset (const_tree offset, const_tree exp)
9407 /* Strip off any conversions. */
9408 while (CONVERT_EXPR_P (offset))
9409 offset = TREE_OPERAND (offset, 0);
9411 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9412 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9413 if (TREE_CODE (offset) != BIT_AND_EXPR
9414 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9415 || compare_tree_int (TREE_OPERAND (offset, 1),
9416 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
9417 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9420 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9421 It must be NEGATE_EXPR. Then strip any more conversions. */
9422 offset = TREE_OPERAND (offset, 0);
9423 while (CONVERT_EXPR_P (offset))
9424 offset = TREE_OPERAND (offset, 0);
9426 if (TREE_CODE (offset) != NEGATE_EXPR)
9429 offset = TREE_OPERAND (offset, 0);
9430 while (CONVERT_EXPR_P (offset))
9431 offset = TREE_OPERAND (offset, 0);
9433 /* This must now be the address of EXP. */
9434 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
9437 /* Return the tree node if an ARG corresponds to a string constant or zero
9438 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9439 in bytes within the string that ARG is accessing. The type of the
9440 offset will be `sizetype'. */
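/* Editor's examples: for &"hello"[1] this returns the STRING_CST with
   *PTR_OFFSET set to 1; for

     static const char msg[] = "hello";

   an access through &msg[1] returns DECL_INITIAL (msg) once the
   read-only and size checks below succeed.  */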
9443 string_constant (tree arg, tree *ptr_offset)
9445 tree array, offset, lower_bound;
9448 if (TREE_CODE (arg) == ADDR_EXPR)
9450 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9452 *ptr_offset = size_zero_node;
9453 return TREE_OPERAND (arg, 0);
9455 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
9457 array = TREE_OPERAND (arg, 0);
9458 offset = size_zero_node;
9460 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
9462 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
9463 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
9464 if (TREE_CODE (array) != STRING_CST
9465 && TREE_CODE (array) != VAR_DECL)
9468 /* Check if the array has a nonzero lower bound. */
9469 lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
9470 if (!integer_zerop (lower_bound))
9472 /* If the offset and lower bound aren't both constants, return 0. */
9473 if (TREE_CODE (lower_bound) != INTEGER_CST)
9475 if (TREE_CODE (offset) != INTEGER_CST)
9477 /* Adjust offset by the lower bound. */
9478 offset = size_diffop (fold_convert (sizetype, offset),
9479 fold_convert (sizetype, lower_bound));
9485 else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
9487 tree arg0 = TREE_OPERAND (arg, 0);
9488 tree arg1 = TREE_OPERAND (arg, 1);
9493 if (TREE_CODE (arg0) == ADDR_EXPR
9494 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
9495 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
9497 array = TREE_OPERAND (arg0, 0);
9500 else if (TREE_CODE (arg1) == ADDR_EXPR
9501 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
9502 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
9504 array = TREE_OPERAND (arg1, 0);
9513 if (TREE_CODE (array) == STRING_CST)
9515 *ptr_offset = fold_convert (sizetype, offset);
9518 else if (TREE_CODE (array) == VAR_DECL)
9522 /* Variables initialized to string literals can be handled too. */
9523 if (DECL_INITIAL (array) == NULL_TREE
9524 || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
9527 /* Only handle arrays that are read-only, non-volatile, and bind locally. */
9528 if (! TREE_READONLY (array)
9529 || TREE_SIDE_EFFECTS (array)
9530 || ! targetm.binds_local_p (array))
9533 /* Avoid const char foo[4] = "abcde"; */
9534 if (DECL_SIZE_UNIT (array) == NULL_TREE
9535 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
9536 || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
9537 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
9540 /* If variable is bigger than the string literal, OFFSET must be constant
9541 and inside of the bounds of the string literal. */
9542 offset = fold_convert (sizetype, offset);
9543 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
9544 && (! host_integerp (offset, 1)
9545 || compare_tree_int (offset, length) >= 0))
9548 *ptr_offset = offset;
9549 return DECL_INITIAL (array);
9555 /* Generate code to calculate EXP using a store-flag instruction
9556 and return an rtx for the result. EXP is either a comparison
9557 or a TRUTH_NOT_EXPR whose operand is a comparison.
9559 If TARGET is nonzero, store the result there if convenient.
9561 If ONLY_CHEAP is nonzero, only do this if it is likely to be very cheap.
9564 Return zero if there is no suitable set-flag instruction
9565 available on this machine.
9567 Once expand_expr has been called on the arguments of the comparison,
9568 we are committed to doing the store flag, since it is not safe to
9569 re-evaluate the expression. We emit the store-flag insn by calling
9570 emit_store_flag, but only expand the arguments if we have a reason
9571 to believe that emit_store_flag will be successful. If we think that
9572 it will, but it isn't, we have to simulate the store-flag with a
9573 set/jump/set sequence. */
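/* The fallback set/jump/set shape, sketched by the editor:

     target = 1;                      (or 0 when inverting)
     if (op0 <cond> op1) goto done;
     target = 0;
   done:

   This is what the code at the end of this function emits.  */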
9576 do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
9579 tree arg0, arg1, type;
9581 enum machine_mode operand_mode;
9585 enum insn_code icode;
9586 rtx subtarget = target;
9589 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9590 result at the end. We can't simply invert the test since it would
9591 have already been inverted if it were valid. This case occurs for
9592 some floating-point comparisons. */
9594 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9595 invert = 1, exp = TREE_OPERAND (exp, 0);
9597 arg0 = TREE_OPERAND (exp, 0);
9598 arg1 = TREE_OPERAND (exp, 1);
9600 /* Don't crash if the comparison was erroneous. */
9601 if (arg0 == error_mark_node || arg1 == error_mark_node)
9604 type = TREE_TYPE (arg0);
9605 operand_mode = TYPE_MODE (type);
9606 unsignedp = TYPE_UNSIGNED (type);
9608 /* We won't bother with BLKmode store-flag operations because it would mean
9609 passing a lot of information to emit_store_flag. */
9610 if (operand_mode == BLKmode)
9613 /* We won't bother with store-flag operations involving function pointers
9614 when function pointers must be canonicalized before comparisons. */
9615 #ifdef HAVE_canonicalize_funcptr_for_compare
9616 if (HAVE_canonicalize_funcptr_for_compare
9617 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9618 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9620 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9621 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9622 == FUNCTION_TYPE))))
9629 /* Get the rtx comparison code to use. We know that EXP is a comparison
9630 operation of some type. Some comparisons against 1 and -1 can be
9631 converted to comparisons with zero. Do so here so that the tests
9632 below will be aware that we have a comparison with zero. These
9633 tests will not catch constants in the first operand, but constants
9634 are rarely passed as the first operand. */
9636 switch (TREE_CODE (exp))
9645 if (integer_onep (arg1))
9646 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9648 code = unsignedp ? LTU : LT;
9651 if (! unsignedp && integer_all_onesp (arg1))
9652 arg1 = integer_zero_node, code = LT;
9654 code = unsignedp ? LEU : LE;
9657 if (! unsignedp && integer_all_onesp (arg1))
9658 arg1 = integer_zero_node, code = GE;
9660 code = unsignedp ? GTU : GT;
9663 if (integer_onep (arg1))
9664 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9666 code = unsignedp ? GEU : GE;
9669 case UNORDERED_EXPR:
9698 /* Put a constant second. */
9699 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
9700 || TREE_CODE (arg0) == FIXED_CST)
9702 tem = arg0; arg0 = arg1; arg1 = tem;
9703 code = swap_condition (code);
9706 /* If this is an equality or inequality test of a single bit, we can
9707 do this by shifting the bit being tested to the low-order bit and
9708 masking the result with the constant 1. If the condition was EQ,
9709 we xor it with 1. This does not require an scc insn and is faster
9710 than an scc insn even if we have it.
9712 The code to make this transformation was moved into fold_single_bit_test,
9713 so we just call into the folder and expand its result. */
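/* Editor's illustration: for "(x & 8) != 0" the folder produces the
   equivalent of "(x >> 3) & 1", and for the EQ form additionally XORs
   with 1, i.e. "((x >> 3) & 1) ^ 1".  */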
9715 if ((code == NE || code == EQ)
9716 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9717 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9719 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
9720 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
9722 target, VOIDmode, EXPAND_NORMAL);
9725 /* Now see if we are likely to be able to do this. Return if not. */
9726 if (! can_compare_p (code, operand_mode, ccp_store_flag))
9729 icode = setcc_gen_code[(int) code];
9731 if (icode == CODE_FOR_nothing)
9733 enum machine_mode wmode;
9735 for (wmode = operand_mode;
9736 icode == CODE_FOR_nothing && wmode != VOIDmode;
9737 wmode = GET_MODE_WIDER_MODE (wmode))
9738 icode = optab_handler (cstore_optab, wmode)->insn_code;
9741 if (icode == CODE_FOR_nothing
9742 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
9744 /* We can only do this if it is one of the special cases that
9745 can be handled without an scc insn. */
9746 if ((code == LT && integer_zerop (arg1))
9747 || (! only_cheap && code == GE && integer_zerop (arg1)))
9749 else if (! only_cheap && (code == NE || code == EQ)
9750 && TREE_CODE (type) != REAL_TYPE
9751 && ((optab_handler (abs_optab, operand_mode)->insn_code
9752 != CODE_FOR_nothing)
9753 || (optab_handler (ffs_optab, operand_mode)->insn_code
9754 != CODE_FOR_nothing)))
9760 if (! get_subtarget (target)
9761 || GET_MODE (subtarget) != operand_mode)
9764 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
9767 target = gen_reg_rtx (mode);
9769 result = emit_store_flag (target, code, op0, op1,
9770 operand_mode, unsignedp, 1);
9775 result = expand_binop (mode, xor_optab, result, const1_rtx,
9776 result, 0, OPTAB_LIB_WIDEN);
9780 /* If this failed, we have to do this with set/compare/jump/set code. */
9782 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9783 target = gen_reg_rtx (GET_MODE (target));
9785 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9786 label = gen_label_rtx ();
9787 do_compare_rtx_and_jump (op0, op1, code, unsignedp, operand_mode, NULL_RTX,
9790 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9797 /* Stubs in case we haven't got a casesi insn. */
9799 # define HAVE_casesi 0
9800 # define gen_casesi(a, b, c, d, e) (0)
9801 # define CODE_FOR_casesi CODE_FOR_nothing
9804 /* If the machine does not have a case insn that compares the bounds,
9805 this means extra overhead for dispatch tables, which raises the
9806 threshold for using them. */
9807 #ifndef CASE_VALUES_THRESHOLD
9808 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
9809 #endif /* CASE_VALUES_THRESHOLD */
9812 case_values_threshold (void)
9814 return CASE_VALUES_THRESHOLD;
9817 /* Attempt to generate a casesi instruction. Returns 1 if successful,
9818 0 otherwise (i.e. if there is no casesi instruction). */
9820 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
9821 rtx table_label ATTRIBUTE_UNUSED, rtx default_label,
9822 rtx fallback_label ATTRIBUTE_UNUSED)
9824 enum machine_mode index_mode = SImode;
9825 int index_bits = GET_MODE_BITSIZE (index_mode);
9826 rtx op1, op2, index;
9827 enum machine_mode op_mode;
9832 /* Convert the index to SImode. */
9833 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
9835 enum machine_mode omode = TYPE_MODE (index_type);
9836 rtx rangertx = expand_normal (range);
9838 /* We must handle the endpoints in the original mode. */
9839 index_expr = build2 (MINUS_EXPR, index_type,
9840 index_expr, minval);
9841 minval = integer_zero_node;
9842 index = expand_normal (index_expr);
9844 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
9845 omode, 1, default_label);
9846 /* Now we can safely truncate. */
9847 index = convert_to_mode (index_mode, index, 0);
9851 if (TYPE_MODE (index_type) != index_mode)
9853 index_type = lang_hooks.types.type_for_size (index_bits, 0);
9854 index_expr = fold_convert (index_type, index_expr);
9857 index = expand_normal (index_expr);
9860 do_pending_stack_adjust ();
9862 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
9863 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
9865 index = copy_to_mode_reg (op_mode, index);
9867 op1 = expand_normal (minval);
9869 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
9870 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
9871 op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
9872 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
9874 op1 = copy_to_mode_reg (op_mode, op1);
9876 op2 = expand_normal (range);
9878 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
9879 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
9880 op2, TYPE_UNSIGNED (TREE_TYPE (range)));
9881 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
9883 op2 = copy_to_mode_reg (op_mode, op2);
9885 emit_jump_insn (gen_casesi (index, op1, op2,
9886 table_label, !default_label
9887 ? fallback_label : default_label));
9891 /* Attempt to generate a tablejump instruction; same concept. */
9892 #ifndef HAVE_tablejump
9893 #define HAVE_tablejump 0
9894 #define gen_tablejump(x, y) (0)
9897 /* Subroutine of the next function.
9899 INDEX is the value being switched on, with the lowest value
9900 in the table already subtracted.
9901 MODE is its expected mode (needed if INDEX is constant).
9902 RANGE is the length of the jump table.
9903 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9905 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9906 index value is out of range. */
9909 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
9914 if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
9915 cfun->cfg->max_jumptable_ents = INTVAL (range);
9917 /* Do an unsigned comparison (in the proper mode) between the index
9918 expression and the value which represents the length of the range.
9919 Since we just finished subtracting the lower bound of the range
9920 from the index expression, this comparison allows us to simultaneously
9921 check that the original index expression value is both greater than
9922 or equal to the minimum value of the range and less than or equal to
9923 the maximum value of the range. */
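/* Editor's note: this is the classic range-check idiom

     if ((unsigned) (index - low) > (unsigned) (high - low))
       goto default_label;

   a single unsigned comparison covers both bounds, because an index
   below LOW wraps around to a large unsigned value.  */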
9926 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
9929 /* If index is in range, it must fit in Pmode.
9930 Convert to Pmode so we can index with it. */
9932 index = convert_to_mode (Pmode, index, 1);
9934 /* Don't let a MEM slip through, because then INDEX that comes
9935 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9936 and break_out_memory_refs will go to work on it and mess it up. */
9937 #ifdef PIC_CASE_VECTOR_ADDRESS
9938 if (flag_pic && !REG_P (index))
9939 index = copy_to_mode_reg (Pmode, index);
9942 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9943 GET_MODE_SIZE, because this indicates how large insns are. The other
9944 uses should all be Pmode, because they are addresses. This code
9945 could fail if addresses and insns are not the same size. */
9946 index = gen_rtx_PLUS (Pmode,
9947 gen_rtx_MULT (Pmode, index,
9948 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9949 gen_rtx_LABEL_REF (Pmode, table_label));
9950 #ifdef PIC_CASE_VECTOR_ADDRESS
9952 index = PIC_CASE_VECTOR_ADDRESS (index);
9955 index = memory_address (CASE_VECTOR_MODE, index);
9956 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9957 vector = gen_const_mem (CASE_VECTOR_MODE, index);
9958 convert_move (temp, vector, 0);
9960 emit_jump_insn (gen_tablejump (temp, table_label));
9962 /* If we are generating PIC code or if the table is PC-relative, the
9963 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9964 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
9969 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
9970 rtx table_label, rtx default_label)
9974 if (! HAVE_tablejump)
9977 index_expr = fold_build2 (MINUS_EXPR, index_type,
9978 fold_convert (index_type, index_expr),
9979 fold_convert (index_type, minval));
9980 index = expand_normal (index_expr);
9981 do_pending_stack_adjust ();
9983 do_tablejump (index, TYPE_MODE (index_type),
9984 convert_modes (TYPE_MODE (index_type),
9985 TYPE_MODE (TREE_TYPE (range)),
9986 expand_normal (range),
9987 TYPE_UNSIGNED (TREE_TYPE (range))),
9988 table_label, default_label);
9992 /* Nonzero if the mode is a valid vector mode for this architecture.
9993 This returns nonzero even if there is no hardware support for the
9994 vector mode, but we can emulate with narrower modes. */
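/* Editor's example: V2DImode can be reported valid on a target with no
   vector unit at all, provided DImode itself is supported, since each
   V2DI operation can be open-coded as two DImode operations.  */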
9997 vector_mode_valid_p (enum machine_mode mode)
9999 enum mode_class class = GET_MODE_CLASS (mode);
10000 enum machine_mode innermode;
10002 /* Doh! What's going on? */
10003 if (class != MODE_VECTOR_INT
10004 && class != MODE_VECTOR_FLOAT
10005 && class != MODE_VECTOR_FRACT
10006 && class != MODE_VECTOR_UFRACT
10007 && class != MODE_VECTOR_ACCUM
10008 && class != MODE_VECTOR_UACCUM)
10011 /* Hardware support. Woo hoo! */
10012 if (targetm.vector_mode_supported_p (mode))
10015 innermode = GET_MODE_INNER (mode);
10017 /* We should probably return 1 if requesting V4DI when we have no DI
10018 but do have V2DI; that case, however, is probably very unlikely. */
10020 /* If we have support for the inner mode, we can safely emulate it.
10021 We may not have V2DI, but we can emulate with a pair of DIs. */
10022 return targetm.scalar_mode_supported_p (innermode);
10025 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
10027 const_vector_from_tree (tree exp)
10032 enum machine_mode inner, mode;
10034 mode = TYPE_MODE (TREE_TYPE (exp));
10036 if (initializer_zerop (exp))
10037 return CONST0_RTX (mode);
10039 units = GET_MODE_NUNITS (mode);
10040 inner = GET_MODE_INNER (mode);
10042 v = rtvec_alloc (units);
10044 link = TREE_VECTOR_CST_ELTS (exp);
10045 for (i = 0; link; link = TREE_CHAIN (link), ++i)
10047 elt = TREE_VALUE (link);
10049 if (TREE_CODE (elt) == REAL_CST)
10050 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
10052 else if (TREE_CODE (elt) == FIXED_CST)
10053 RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
10056 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
10057 TREE_INT_CST_HIGH (elt),
10061 /* Initialize remaining elements to 0. */
10062 for (; i < units; ++i)
10063 RTVEC_ELT (v, i) = CONST0_RTX (inner);
10065 return gen_rtx_CONST_VECTOR (mode, v);
10067 #include "gt-expr.h"